repo_name
stringlengths
1
52
repo_creator
stringclasses
6 values
programming_language
stringclasses
4 values
code
stringlengths
0
9.68M
num_lines
int64
1
234k
orrb
openai
C#
using System;
using System.Globalization;
using System.IO;
using System.Text;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;

namespace UnityEditor.Rendering.PostProcessing
{
    // CUBE lut specs:
    // http://wwwimages.adobe.com/content/dam/Adobe/en/products/speedgrade/cc/pdfs/cube-lut-specification-1.0.pdf
    static class CubeLutAssetFactory
    {
        const int kVersion = 1;
        const int kSize = 33;

#if POSTFX_DEBUG_MENUS
        [MenuItem("Tools/Post-processing/Create Utility Luts")]
#endif
        // Generates the utility conversion luts used by the post-processing stack and
        // refreshes the asset database so they appear in the project window.
        static void CreateLuts()
        {
            Dump("Linear to Unity Log r" + kVersion, ColorUtilities.LinearToLogC);
            Dump("Unity Log to Linear r" + kVersion, ColorUtilities.LogCToLinear);
            Dump("sRGB to Unity Log r" + kVersion, x => ColorUtilities.LinearToLogC(Mathf.GammaToLinearSpace(x)));
            Dump("Unity Log to sRGB r" + kVersion, x => Mathf.LinearToGammaSpace(ColorUtilities.LogCToLinear(x)));
            Dump("Linear to sRGB r" + kVersion, Mathf.LinearToGammaSpace);
            Dump("sRGB to Linear r" + kVersion, Mathf.GammaToLinearSpace);

            AssetDatabase.Refresh();
        }

        // Writes a kSize^3 CUBE lut to "<project>/Assets/<title>.cube", evaluating <eval>
        // per channel. All numbers are formatted with the invariant culture: the CUBE
        // format uses '.' as decimal separator, so the output must not depend on the
        // machine's locale (comma-decimal cultures would otherwise produce invalid files).
        static void Dump(string title, Func<float, float> eval)
        {
            var sb = new StringBuilder();
            sb.AppendFormat(CultureInfo.InvariantCulture, "TITLE \"{0}\"\n", title);
            sb.AppendFormat(CultureInfo.InvariantCulture, "LUT_3D_SIZE {0}\n", kSize);
            sb.AppendFormat(CultureInfo.InvariantCulture, "DOMAIN_MIN {0} {0} {0}\n", 0f);
            sb.AppendFormat(CultureInfo.InvariantCulture, "DOMAIN_MAX {0} {0} {0}\n", 1f);

            const float kSizeMinusOne = (float)kSize - 1f;

            for (int x = 0; x < kSize; x++)
            for (int y = 0; y < kSize; y++)
            for (int z = 0; z < kSize; z++)
            {
                float ox = eval((float)x / kSizeMinusOne);
                float oy = eval((float)y / kSizeMinusOne);
                float oz = eval((float)z / kSizeMinusOne);

                // Resolve & Photoshop use BGR as default, let's make it easier for users
                sb.AppendFormat(CultureInfo.InvariantCulture, "{0} {1} {2}\n", oz, oy, ox);
            }

            var content = sb.ToString();
            var path = Path.Combine(Application.dataPath, string.Format("{0}.cube", title));
            File.WriteAllText(path, content);
        }
    }
}
59
orrb
openai
C#
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using UnityEngine;

namespace UnityEditor.Rendering.PostProcessing
{
    // Turns imported .cube files into Texture3D lut assets.
    sealed class CubeLutAssetImporter : AssetPostprocessor
    {
        // Utility luts generated by CubeLutAssetFactory; these should not be converted
        // into Texture3D assets.
        static List<string> s_Excluded = new List<string>()
        {
            "Linear to sRGB r1",
            "Linear to Unity Log r1",
            "sRGB to Linear r1",
            "sRGB to Unity Log r1",
            "Unity Log to Linear r1",
            "Unity Log to sRGB r1"
        };

        static void OnPostprocessAllAssets(string[] imported, string[] deleted, string[] moved, string[] movedFrom)
        {
            foreach (string path in imported)
            {
                string ext = Path.GetExtension(path);
                string filename = Path.GetFileNameWithoutExtension(path);

                if (string.IsNullOrEmpty(ext) || s_Excluded.Contains(filename))
                    continue;

                ext = ext.ToLowerInvariant();
                if (ext.Equals(".cube"))
                    ImportCubeLut(path);
            }
        }

        // Basic CUBE lut parser
        // Specs: http://wwwimages.adobe.com/content/dam/Adobe/en/products/speedgrade/cc/pdfs/cube-lut-specification-1.0.pdf
        static void ImportCubeLut(string path)
        {
            // Remove the 'Assets' part of the path & build absolute path
            string fullpath = path.Substring(7);
            fullpath = Path.Combine(Application.dataPath, fullpath);

            // Read the lut data
            string[] lines = File.ReadAllLines(fullpath);

            // Start parsing
            int i = 0;
            int size = -1;
            int sizeCube = -1;
            var table = new List<Color>();
            var domainMin = Color.black; // parsed for validation; not applied to the texture
            var domainMax = Color.white;

            while (true)
            {
                if (i >= lines.Length)
                {
                    if (table.Count != sizeCube)
                        Debug.LogError("Premature end of file");

                    break;
                }

                string line = FilterLine(lines[i]);

                if (string.IsNullOrEmpty(line))
                {
                    i++;
                    continue;
                }

                // Header data
                if (line.StartsWith("TITLE"))
                {
                    // Skip the title tag, we don't need it
                    i++;
                    continue;
                }

                if (line.StartsWith("LUT_3D_SIZE"))
                {
                    string sizeStr = line.Substring(11).TrimStart();

                    // CUBE files are locale-independent; parse with the invariant culture
                    // so import works on e.g. comma-decimal locales
                    if (!int.TryParse(sizeStr, NumberStyles.Integer, CultureInfo.InvariantCulture, out size))
                    {
                        Debug.LogError("Invalid data on line " + i);
                        break;
                    }

                    if (size < 2 || size > 256)
                    {
                        Debug.LogError("LUT size out of range");
                        break;
                    }

                    sizeCube = size * size * size;
                    i++;
                    continue;
                }

                if (line.StartsWith("DOMAIN_MIN"))
                {
                    if (!ParseDomain(i, line, ref domainMin))
                        break;

                    i++;
                    continue;
                }

                if (line.StartsWith("DOMAIN_MAX"))
                {
                    if (!ParseDomain(i, line, ref domainMax))
                        break;

                    i++;
                    continue;
                }

                // Table row: exactly three whitespace-separated floats
                string[] row = line.Split();

                if (row.Length != 3)
                {
                    Debug.LogError("Invalid data on line " + i);
                    break;
                }

                var color = Color.black;
                bool invalidRow = false;

                for (int j = 0; j < 3; j++)
                {
                    float d;

                    // Invariant culture: the spec mandates '.' as decimal separator
                    if (!float.TryParse(row[j], NumberStyles.Float, CultureInfo.InvariantCulture, out d))
                    {
                        Debug.LogError("Invalid data on line " + i);
                        invalidRow = true;
                        break;
                    }

                    color[j] = d;
                }

                // Abort parsing instead of silently keeping a half-parsed color and
                // continuing to the next line
                if (invalidRow)
                    break;

                table.Add(color);
                i++;
            }

            if (sizeCube != table.Count)
            {
                Debug.LogError("Wrong table size - Expected " + sizeCube + " elements, got " + table.Count);
                return;
            }

            // Check if the Texture3D already exists, update it in this case (better
            // workflow for the user)
            string assetPath = Path.ChangeExtension(path, ".asset");
            var tex = AssetDatabase.LoadAssetAtPath<Texture3D>(assetPath);

            if (tex != null)
            {
                tex.SetPixels(table.ToArray(), 0);
                tex.Apply();
            }
            else
            {
                // Generate a new Texture3D
                tex = new Texture3D(size, size, size, TextureFormat.RGBAHalf, false)
                {
                    anisoLevel = 0,
                    filterMode = FilterMode.Bilinear,
                    wrapMode = TextureWrapMode.Clamp,
                };
                tex.SetPixels(table.ToArray(), 0);
                tex.Apply();

                // Save to disk
                AssetDatabase.CreateAsset(tex, assetPath);
            }

            AssetDatabase.SaveAssets();
            AssetDatabase.Refresh();
        }

        // Trims a raw line and strips everything from the first '#' (comment marker) on.
        static string FilterLine(string line)
        {
            var filtered = new StringBuilder();
            line = line.TrimStart().TrimEnd();
            int len = line.Length;
            int i = 0;

            while (i < len)
            {
                char c = line[i];

                if (c == '#') // Filters comments out
                    break;

                filtered.Append(c);
                i++;
            }

            return filtered.ToString();
        }

        // Parses a "DOMAIN_MIN r g b" / "DOMAIN_MAX r g b" line into <domain>.
        // Logs and returns false on malformed data.
        static bool ParseDomain(int i, string line, ref Color domain)
        {
            string[] domainStrs = line.Substring(10).TrimStart().Split();

            if (domainStrs.Length != 3)
            {
                Debug.LogError("Invalid data on line " + i);
                return false;
            }

            for (int j = 0; j < 3; j++)
            {
                float d;

                if (!float.TryParse(domainStrs[j], NumberStyles.Float, CultureInfo.InvariantCulture, out d))
                {
                    Debug.LogError("Invalid data on line " + i);
                    return false;
                }

                domain[j] = d;
            }

            return true;
        }
    }
}
218
orrb
openai
C#
using System;
using System.Linq;

namespace UnityEditor.Rendering.PostProcessing
{
    // Ensures the UNITY_POST_PROCESSING_STACK_V2 scripting define symbol is present on
    // every non-obsolete build target group; runs as soon as the editor loads this
    // assembly thanks to [InitializeOnLoad].
    [InitializeOnLoad]
    sealed class DefineSetter
    {
        const string k_Define = "UNITY_POST_PROCESSING_STACK_V2";

        static DefineSetter()
        {
            var groups = Enum.GetValues(typeof(BuildTargetGroup))
                .Cast<BuildTargetGroup>()
                .Where(g => g != BuildTargetGroup.Unknown)
                .Where(g => !IsObsolete(g));

            foreach (var group in groups)
            {
                string rawDefines = PlayerSettings.GetScriptingDefineSymbolsForGroup(group).Trim();

                // Symbols may be separated by ';' or spaces; drop empty entries
                var symbols = rawDefines.Split(';', ' ')
                    .Where(s => !string.IsNullOrEmpty(s))
                    .ToList();

                if (symbols.Contains(k_Define))
                    continue;

                symbols.Add(k_Define);
                PlayerSettings.SetScriptingDefineSymbolsForGroup(group, string.Join(";", symbols));
            }
        }

        // True when the enum member carries [Obsolete] — Unity keeps retired platforms
        // in the BuildTargetGroup enum as obsolete members.
        static bool IsObsolete(BuildTargetGroup group)
        {
            var attrs = typeof(BuildTargetGroup)
                .GetField(group.ToString())
                .GetCustomAttributes(typeof(ObsoleteAttribute), false);

            return attrs != null && attrs.Length > 0;
        }
    }
}
46
orrb
openai
C#
using UnityEngine;
using UnityEditor.ProjectWindowCallback;
using System.IO;
using UnityEngine.SceneManagement;
using UnityEngine.Rendering.PostProcessing;

namespace UnityEditor.Rendering.PostProcessing
{
    // Creation helpers for PostProcessProfile assets.
    public class ProfileFactory
    {
        [MenuItem("Assets/Create/Post-processing Profile", priority = 201)]
        static void CreatePostProcessProfile()
        {
            //var icon = EditorGUIUtility.FindTexture("ScriptableObject Icon");
            ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, ScriptableObject.CreateInstance<DoCreatePostProcessProfile>(), "New Post-processing Profile.asset", null, null);
        }

        // Creates and saves a profile asset at an explicit project path; the asset name
        // is taken from the path's file name.
        public static PostProcessProfile CreatePostProcessProfileAtPath(string path)
        {
            var asset = ScriptableObject.CreateInstance<PostProcessProfile>();
            asset.name = Path.GetFileName(path);
            AssetDatabase.CreateAsset(asset, path);
            AssetDatabase.SaveAssets();
            AssetDatabase.Refresh();
            return asset;
        }

        // Creates a profile next to the given scene (inside a "<scene>_Profiles" folder,
        // created on demand), or under "Assets/" when the scene has not been saved yet.
        // The final asset path is made unique.
        public static PostProcessProfile CreatePostProcessProfile(Scene scene, string targetName)
        {
            string basePath;

            if (string.IsNullOrEmpty(scene.path))
            {
                basePath = "Assets/";
            }
            else
            {
                string sceneDir = Path.GetDirectoryName(scene.path);
                string folderName = scene.name + "_Profiles";
                string folderPath = sceneDir + "/" + folderName;

                if (!AssetDatabase.IsValidFolder(folderPath))
                    AssetDatabase.CreateFolder(sceneDir, folderName);

                basePath = folderPath + "/";
            }

            string assetPath = AssetDatabase.GenerateUniqueAssetPath(basePath + targetName + " Profile.asset");

            var asset = ScriptableObject.CreateInstance<PostProcessProfile>();
            AssetDatabase.CreateAsset(asset, assetPath);
            AssetDatabase.SaveAssets();
            AssetDatabase.Refresh();
            return asset;
        }
    }

    // Invoked by the "Create" menu item once the user confirms the new asset's name.
    class DoCreatePostProcessProfile : EndNameEditAction
    {
        public override void Action(int instanceId, string pathName, string resourceFile)
        {
            var profile = ProfileFactory.CreatePostProcessProfileAtPath(pathName);
            ProjectWindowUtil.ShowCreatedAsset(profile);
        }
    }
}
68
orrb
openai
C#
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;

namespace UnityEditor.Rendering.PostProcessing
{
    // Debug-only helper that creates the PostProcessResources asset used by the stack.
    static class ResourceAssetFactory
    {
#if POSTFX_DEBUG_MENUS
        [MenuItem("Tools/Post-processing/Create Resources Asset")]
#endif
        static void CreateAsset()
        {
            var resources = ScriptableObject.CreateInstance<PostProcessResources>();
            AssetDatabase.CreateAsset(resources, "Assets/PostProcessResources.asset");
            AssetDatabase.SaveAssets();
            AssetDatabase.Refresh();
        }
    }
}
20
orrb
openai
C#
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;

namespace UnityEditor.Rendering.PostProcessing
{
    // Menu entry that spawns a ready-to-use post-process volume in the open scene.
    public static class VolumeFactory
    {
        [MenuItem("GameObject/3D Object/Post-process Volume")]
        static void CreateVolume()
        {
            var volumeObject = new GameObject("Post-process Volume");

            // Unit-sized trigger box so the volume blends by proximity instead of
            // physically colliding
            var trigger = volumeObject.AddComponent<BoxCollider>();
            trigger.size = Vector3.one;
            trigger.isTrigger = true;

            volumeObject.AddComponent<PostProcessVolume>();

            Selection.objects = new [] { volumeObject };
            EditorApplication.ExecuteMenuItem("GameObject/Move To View");
        }
    }
}
22
orrb
openai
C#
// CurveEditor: immediate-mode editor widget that draws AnimationCurve values stored in
// SerializedProperty objects and lets the user move keyframes and drag tangent handles
// with the mouse. OnGUI(rect) draws every registered curve and returns true when an
// edit was made this frame (the "dirty" flag), so callers know to repaint/apply.
// Curves are drawn with Handles (bezier segments, hard steps for infinite tangents);
// edits are written back through SerializedProperty.animationCurveValue.
using System; using System.Collections.Generic; using UnityEngine; namespace UnityEditor.Rendering.PostProcessing { public sealed class CurveEditor {
#region Enums
// EditMode: the active mouse interaction; Tangent: which handle is being dragged.
enum EditMode { None, Moving, TangentEdit } enum Tangent { In, Out }
#endregion
#region Structs
// Settings: global options — displayed curve domain (bounds), canvas padding, selection
// color, pick distance in canvas units, and minimum time spacing enforced between keys.
// CurveState: per-curve display/edit options. Selection: snapshot of the current pick.
public struct Settings { public Rect bounds; public RectOffset padding; public Color selectionColor; public float curvePickingDistance; public float keyTimeClampingDistance; public static Settings defaultSettings { get { return new Settings { bounds = new Rect(0f, 0f, 1f, 1f), padding = new RectOffset(10, 10, 10, 10), selectionColor = Color.yellow, curvePickingDistance = 6f, keyTimeClampingDistance = 1e-4f }; } } } public struct CurveState { public bool visible; public bool editable; public uint minPointCount; public float zeroKeyConstantValue; public Color color; public float width; public float handleWidth; public bool showNonEditableHandles; public bool onlyShowHandlesOnSelection; public bool loopInBounds; public static CurveState defaultState { get { return new CurveState { visible = true, editable = true, minPointCount = 2, zeroKeyConstantValue = 0f, color = Color.white, width = 2f, handleWidth = 2f, showNonEditableHandles = true, onlyShowHandlesOnSelection = false, loopInBounds = false }; } } } public struct Selection { public SerializedProperty curve; public int keyframeIndex; public Keyframe? keyframe; public Selection(SerializedProperty curve, int keyframeIndex, Keyframe?
keyframe) { this.curve = curve; this.keyframeIndex = keyframeIndex; this.keyframe = keyframe; } } internal struct MenuAction { internal SerializedProperty curve; internal int index; internal Vector3 position; internal MenuAction(SerializedProperty curve) { this.curve = curve; this.index = -1; this.position = Vector3.zero; } internal MenuAction(SerializedProperty curve, int index) { this.curve = curve; this.index = index; this.position = Vector3.zero; } internal MenuAction(SerializedProperty curve, Vector3 position) { this.curve = curve; this.index = -1; this.position = position; } }
#endregion
#region Fields & properties
public Settings settings { get; private set; } readonly Dictionary<SerializedProperty, CurveState> m_Curves; Rect m_CurveArea; SerializedProperty m_SelectedCurve; int m_SelectedKeyframeIndex = -1; EditMode m_EditMode = EditMode.None; Tangent m_TangentEditMode; bool m_Dirty;
#endregion
#region Constructors & destructors
public CurveEditor() : this(Settings.defaultSettings) { } public CurveEditor(Settings settings) { this.settings = settings; m_Curves = new Dictionary<SerializedProperty, CurveState>(); }
#endregion
#region Public API
// Add/Remove register SerializedProperty-backed curves along with per-curve state.
public void Add(params SerializedProperty[] curves) { foreach (var curve in curves) Add(curve, CurveState.defaultState); } public void Add(SerializedProperty curve) { Add(curve, CurveState.defaultState); } public void Add(SerializedProperty curve, CurveState state) {
// Make sure the property is in fact an AnimationCurve
var animCurve = curve.animationCurveValue; if (animCurve == null) throw new ArgumentException("curve"); if (m_Curves.ContainsKey(curve)) Debug.LogWarning("Curve has already been added to the editor"); m_Curves.Add(curve, state); } public void Remove(SerializedProperty curve) { m_Curves.Remove(curve); } public void RemoveAll() { m_Curves.Clear(); } public CurveState GetCurveState(SerializedProperty curve) { CurveState state; if (!m_Curves.TryGetValue(curve, out state)) throw new
KeyNotFoundException("curve"); return state; } public void SetCurveState(SerializedProperty curve, CurveState state) { if (!m_Curves.ContainsKey(curve)) throw new KeyNotFoundException("curve"); m_Curves[curve] = state; } public Selection GetSelection() { Keyframe? key = null; if (m_SelectedKeyframeIndex > -1) { var curve = m_SelectedCurve.animationCurveValue; if (m_SelectedKeyframeIndex >= curve.length) m_SelectedKeyframeIndex = -1; else key = curve[m_SelectedKeyframeIndex]; } return new Selection(m_SelectedCurve, m_SelectedKeyframeIndex, key); } public void SetKeyframe(SerializedProperty curve, int keyframeIndex, Keyframe keyframe) { var animCurve = curve.animationCurveValue; SetKeyframe(animCurve, keyframeIndex, keyframe); SaveCurve(curve, animCurve); } public bool OnGUI(Rect rect) { if (Event.current.type == EventType.Repaint) m_Dirty = false; GUI.BeginClip(rect); { var area = new Rect(Vector2.zero, rect.size); m_CurveArea = settings.padding.Remove(area); foreach (var curve in m_Curves) OnCurveGUI(area, curve.Key, curve.Value); OnGeneralUI(area); } GUI.EndClip(); return m_Dirty; }
#endregion
#region UI & events
// Draws one curve plus its key/tangent handles and processes per-key mouse events.
void OnCurveGUI(Rect rect, SerializedProperty curve, CurveState state) {
// Discard invisible curves
if (!state.visible) return; var animCurve = curve.animationCurveValue; var keys = animCurve.keys; var length = keys.Length;
// Curve drawing
// Slightly dim non-editable curves
var color = state.color; if (!state.editable || !GUI.enabled) color.a *= 0.5f; Handles.color = color; var bounds = settings.bounds; if (length == 0) { var p1 = CurveToCanvas(new Vector3(bounds.xMin, state.zeroKeyConstantValue)); var p2 = CurveToCanvas(new Vector3(bounds.xMax, state.zeroKeyConstantValue)); Handles.DrawAAPolyLine(state.width, p1, p2); } else if (length == 1) { var p1 = CurveToCanvas(new Vector3(bounds.xMin, keys[0].value)); var p2 = CurveToCanvas(new Vector3(bounds.xMax, keys[0].value)); Handles.DrawAAPolyLine(state.width, p1, p2); } else { var prevKey = keys[0]; for
(int k = 1; k < length; k++) { var key = keys[k]; var pts = BezierSegment(prevKey, key); if (float.IsInfinity(prevKey.outTangent) || float.IsInfinity(key.inTangent)) { var s = HardSegment(prevKey, key); Handles.DrawAAPolyLine(state.width, s[0], s[1], s[2]); } else Handles.DrawBezier(pts[0], pts[3], pts[1], pts[2], color, null, state.width); prevKey = key; }
// Curve extents & loops
if (keys[0].time > bounds.xMin) { if (state.loopInBounds) { var p1 = keys[length - 1]; p1.time -= settings.bounds.width; var p2 = keys[0]; var pts = BezierSegment(p1, p2); if (float.IsInfinity(p1.outTangent) || float.IsInfinity(p2.inTangent)) { var s = HardSegment(p1, p2); Handles.DrawAAPolyLine(state.width, s[0], s[1], s[2]); } else Handles.DrawBezier(pts[0], pts[3], pts[1], pts[2], color, null, state.width); } else { var p1 = CurveToCanvas(new Vector3(bounds.xMin, keys[0].value)); var p2 = CurveToCanvas(keys[0]); Handles.DrawAAPolyLine(state.width, p1, p2); } } if (keys[length - 1].time < bounds.xMax) { if (state.loopInBounds) { var p1 = keys[length - 1]; var p2 = keys[0]; p2.time += settings.bounds.width; var pts = BezierSegment(p1, p2); if (float.IsInfinity(p1.outTangent) || float.IsInfinity(p2.inTangent)) { var s = HardSegment(p1, p2); Handles.DrawAAPolyLine(state.width, s[0], s[1], s[2]); } else Handles.DrawBezier(pts[0], pts[3], pts[1], pts[2], color, null, state.width); } else { var p1 = CurveToCanvas(keys[length - 1]); var p2 = CurveToCanvas(new Vector3(bounds.xMax, keys[length - 1].value)); Handles.DrawAAPolyLine(state.width, p1, p2); } } }
// Make sure selection is correct (undo can break it)
bool isCurrentlySelectedCurve = curve == m_SelectedCurve; if (isCurrentlySelectedCurve && m_SelectedKeyframeIndex >= length) m_SelectedKeyframeIndex = -1; if (!state.editable) m_SelectedKeyframeIndex = -1; float enabledFactor = GUI.enabled ?
1f : 0.8f;
// Handles & keys
for (int k = 0; k < length; k++) { bool isCurrentlySelectedKeyframe = k == m_SelectedKeyframeIndex; var e = Event.current; var pos = CurveToCanvas(keys[k]); var hitRect = new Rect(pos.x - 8f, pos.y - 8f, 16f, 16f); var offset = isCurrentlySelectedCurve ? new RectOffset(5, 5, 5, 5) : new RectOffset(6, 6, 6, 6); var outTangent = pos + CurveTangentToCanvas(keys[k].outTangent).normalized * 40f; var inTangent = pos - CurveTangentToCanvas(keys[k].inTangent).normalized * 40f; var inTangentHitRect = new Rect(inTangent.x - 7f, inTangent.y - 7f, 14f, 14f); var outTangentHitrect = new Rect(outTangent.x - 7f, outTangent.y - 7f, 14f, 14f);
// Draw
if (state.editable || state.showNonEditableHandles) { if (e.type == EventType.Repaint) { var selectedColor = (isCurrentlySelectedCurve && isCurrentlySelectedKeyframe) ? settings.selectionColor : state.color;
// Keyframe
EditorGUI.DrawRect(offset.Remove(hitRect), selectedColor * enabledFactor);
// Tangents
if (isCurrentlySelectedCurve && (!state.onlyShowHandlesOnSelection || (state.onlyShowHandlesOnSelection && isCurrentlySelectedKeyframe))) { Handles.color = selectedColor * enabledFactor; if (k > 0 || state.loopInBounds) { Handles.DrawAAPolyLine(state.handleWidth, pos, inTangent); EditorGUI.DrawRect(offset.Remove(inTangentHitRect), selectedColor); } if (k < length - 1 || state.loopInBounds) { Handles.DrawAAPolyLine(state.handleWidth, pos, outTangent); EditorGUI.DrawRect(offset.Remove(outTangentHitrect), selectedColor); } } } }
// Events
if (state.editable) {
// Keyframe move
if (m_EditMode == EditMode.Moving && e.type == EventType.MouseDrag && isCurrentlySelectedCurve && isCurrentlySelectedKeyframe) { EditMoveKeyframe(animCurve, keys, k); }
// Tangent editing
if (m_EditMode == EditMode.TangentEdit && e.type == EventType.MouseDrag && isCurrentlySelectedCurve && isCurrentlySelectedKeyframe) { bool alreadyBroken = !(Mathf.Approximately(keys[k].inTangent, keys[k].outTangent) ||
(float.IsInfinity(keys[k].inTangent) && float.IsInfinity(keys[k].outTangent))); EditMoveTangent(animCurve, keys, k, m_TangentEditMode, e.shift || !(alreadyBroken || e.control)); }
// Keyframe selection & context menu
if (e.type == EventType.MouseDown && rect.Contains(e.mousePosition)) { if (hitRect.Contains(e.mousePosition)) { if (e.button == 0) { SelectKeyframe(curve, k); m_EditMode = EditMode.Moving; e.Use(); } else if (e.button == 1) {
// Keyframe context menu
var menu = new GenericMenu(); menu.AddItem(new GUIContent("Delete Key"), false, (x) => { var action = (MenuAction)x; var curveValue = action.curve.animationCurveValue; action.curve.serializedObject.Update(); RemoveKeyframe(curveValue, action.index); m_SelectedKeyframeIndex = -1; SaveCurve(action.curve, curveValue); action.curve.serializedObject.ApplyModifiedProperties(); }, new MenuAction(curve, k)); menu.ShowAsContext(); e.Use(); } } }
// Tangent selection & edit mode
if (e.type == EventType.MouseDown && rect.Contains(e.mousePosition)) { if (inTangentHitRect.Contains(e.mousePosition) && (k > 0 || state.loopInBounds)) { SelectKeyframe(curve, k); m_EditMode = EditMode.TangentEdit; m_TangentEditMode = Tangent.In; e.Use(); } else if (outTangentHitrect.Contains(e.mousePosition) && (k < length - 1 || state.loopInBounds)) { SelectKeyframe(curve, k); m_EditMode = EditMode.TangentEdit; m_TangentEditMode = Tangent.Out; e.Use(); } }
// Mouse up - clean up states
if (e.rawType == EventType.MouseUp && m_EditMode != EditMode.None) { m_EditMode = EditMode.None; }
// Set cursors
{ EditorGUIUtility.AddCursorRect(hitRect, MouseCursor.MoveArrow); if (k > 0 || state.loopInBounds) EditorGUIUtility.AddCursorRect(inTangentHitRect, MouseCursor.RotateArrow); if (k < length - 1 || state.loopInBounds) EditorGUIUtility.AddCursorRect(outTangentHitrect, MouseCursor.RotateArrow); } } } Handles.color = Color.white; SaveCurve(curve, animCurve); } void OnGeneralUI(Rect rect) { var e = Event.current;
// Selection
if (e.type ==
EventType.MouseDown) { GUI.FocusControl(null); m_SelectedCurve = null; m_SelectedKeyframeIndex = -1; bool used = false; var hit = CanvasToCurve(e.mousePosition); float curvePickValue = CurveToCanvas(hit).y;
// Try and select a curve
foreach (var curve in m_Curves) { if (!curve.Value.editable || !curve.Value.visible) continue; var prop = curve.Key; var state = curve.Value; var animCurve = prop.animationCurveValue; float hitY = animCurve.length == 0 ? state.zeroKeyConstantValue : animCurve.Evaluate(hit.x); var curvePos = CurveToCanvas(new Vector3(hit.x, hitY)); if (Mathf.Abs(curvePos.y - curvePickValue) < settings.curvePickingDistance) { m_SelectedCurve = prop; if (e.clickCount == 2 && e.button == 0) {
// Create a keyframe on double-click on this curve
EditCreateKeyframe(animCurve, hit, true, state.zeroKeyConstantValue); SaveCurve(prop, animCurve); } else if (e.button == 1) {
// Curve context menu
var menu = new GenericMenu(); menu.AddItem(new GUIContent("Add Key"), false, (x) => { var action = (MenuAction)x; var curveValue = action.curve.animationCurveValue; action.curve.serializedObject.Update(); EditCreateKeyframe(curveValue, hit, true, 0f); SaveCurve(action.curve, curveValue); action.curve.serializedObject.ApplyModifiedProperties(); }, new MenuAction(prop, hit)); menu.ShowAsContext(); e.Use(); used = true; } } } if (e.clickCount == 2 && e.button == 0 && m_SelectedCurve == null) {
// Create a keyframe on every curve on double-click
foreach (var curve in m_Curves) { if (!curve.Value.editable || !curve.Value.visible) continue; var prop = curve.Key; var state = curve.Value; var animCurve = prop.animationCurveValue; EditCreateKeyframe(animCurve, hit, e.alt, state.zeroKeyConstantValue); SaveCurve(prop, animCurve); } } else if (!used && e.button == 1) {
// Global context menu
var menu = new GenericMenu(); menu.AddItem(new GUIContent("Add Key At Position"), false, () => ContextMenuAddKey(hit, false)); menu.AddItem(new GUIContent("Add Key On Curves"), false, () =>
ContextMenuAddKey(hit, true)); menu.ShowAsContext(); } e.Use(); }
// Delete selected key(s)
if (e.type == EventType.KeyDown && (e.keyCode == KeyCode.Delete || e.keyCode == KeyCode.Backspace)) { if (m_SelectedKeyframeIndex != -1 && m_SelectedCurve != null) { var animCurve = m_SelectedCurve.animationCurveValue; var length = animCurve.length; if (m_Curves[m_SelectedCurve].minPointCount < length && length >= 0) { EditDeleteKeyframe(animCurve, m_SelectedKeyframeIndex); m_SelectedKeyframeIndex = -1; SaveCurve(m_SelectedCurve, animCurve); } e.Use(); } } } void SaveCurve(SerializedProperty prop, AnimationCurve curve) { prop.animationCurveValue = curve; } void Invalidate() { m_Dirty = true; }
#endregion
#region Keyframe manipulations
void SelectKeyframe(SerializedProperty curve, int keyframeIndex) { m_SelectedKeyframeIndex = keyframeIndex; m_SelectedCurve = curve; Invalidate(); } void ContextMenuAddKey(Vector3 hit, bool createOnCurve) { SerializedObject serializedObject = null; foreach (var curve in m_Curves) { if (!curve.Value.editable || !curve.Value.visible) continue; var prop = curve.Key; var state = curve.Value; if (serializedObject == null) { serializedObject = prop.serializedObject; serializedObject.Update(); } var animCurve = prop.animationCurveValue; EditCreateKeyframe(animCurve, hit, createOnCurve, state.zeroKeyConstantValue); SaveCurve(prop, animCurve); } if (serializedObject != null) serializedObject.ApplyModifiedProperties(); Invalidate(); } void EditCreateKeyframe(AnimationCurve curve, Vector3 position, bool createOnCurve, float zeroKeyConstantValue) { float tangent = EvaluateTangent(curve, position.x); if (createOnCurve) { position.y = curve.length == 0 ?
zeroKeyConstantValue : curve.Evaluate(position.x); } AddKeyframe(curve, new Keyframe(position.x, position.y, tangent, tangent)); } void EditDeleteKeyframe(AnimationCurve curve, int keyframeIndex) { RemoveKeyframe(curve, keyframeIndex); } void AddKeyframe(AnimationCurve curve, Keyframe newValue) { curve.AddKey(newValue); Invalidate(); } void RemoveKeyframe(AnimationCurve curve, int keyframeIndex) { curve.RemoveKey(keyframeIndex); Invalidate(); } void SetKeyframe(AnimationCurve curve, int keyframeIndex, Keyframe newValue) { var keys = curve.keys; if (keyframeIndex > 0) newValue.time = Mathf.Max(keys[keyframeIndex - 1].time + settings.keyTimeClampingDistance, newValue.time); if (keyframeIndex < keys.Length - 1) newValue.time = Mathf.Min(keys[keyframeIndex + 1].time - settings.keyTimeClampingDistance, newValue.time); curve.MoveKey(keyframeIndex, newValue); Invalidate(); } void EditMoveKeyframe(AnimationCurve curve, Keyframe[] keys, int keyframeIndex) { var key = CanvasToCurve(Event.current.mousePosition); float inTgt = keys[keyframeIndex].inTangent; float outTgt = keys[keyframeIndex].outTangent; SetKeyframe(curve, keyframeIndex, new Keyframe(key.x, key.y, inTgt, outTgt)); } void EditMoveTangent(AnimationCurve curve, Keyframe[] keys, int keyframeIndex, Tangent targetTangent, bool linkTangents) { var pos = CanvasToCurve(Event.current.mousePosition); float time = keys[keyframeIndex].time; float value = keys[keyframeIndex].value; pos -= new Vector3(time, value); if (targetTangent == Tangent.In && pos.x > 0f) pos.x = 0f; if (targetTangent == Tangent.Out && pos.x < 0f) pos.x = 0f; float tangent; if (Mathf.Approximately(pos.x, 0f)) tangent = pos.y < 0f ?
float.PositiveInfinity : float.NegativeInfinity; else tangent = pos.y / pos.x; float inTangent = keys[keyframeIndex].inTangent; float outTangent = keys[keyframeIndex].outTangent; if (targetTangent == Tangent.In || linkTangents) inTangent = tangent; if (targetTangent == Tangent.Out || linkTangents) outTangent = tangent; SetKeyframe(curve, keyframeIndex, new Keyframe(time, value, inTangent, outTangent)); }
#endregion
#region Maths utilities
// Conversions between curve space (time/value inside settings.bounds) and canvas
// pixels (m_CurveArea, y-axis flipped), plus bezier/hard segment construction and a
// finite-difference tangent estimate used when creating keys.
Vector3 CurveToCanvas(Keyframe keyframe) { return CurveToCanvas(new Vector3(keyframe.time, keyframe.value)); } Vector3 CurveToCanvas(Vector3 position) { var bounds = settings.bounds; var output = new Vector3((position.x - bounds.x) / (bounds.xMax - bounds.x), (position.y - bounds.y) / (bounds.yMax - bounds.y)); output.x = output.x * (m_CurveArea.xMax - m_CurveArea.xMin) + m_CurveArea.xMin; output.y = (1f - output.y) * (m_CurveArea.yMax - m_CurveArea.yMin) + m_CurveArea.yMin; return output; } Vector3 CanvasToCurve(Vector3 position) { var bounds = settings.bounds; var output = position; output.x = (output.x - m_CurveArea.xMin) / (m_CurveArea.xMax - m_CurveArea.xMin); output.y = (output.y - m_CurveArea.yMin) / (m_CurveArea.yMax - m_CurveArea.yMin); output.x = Mathf.Lerp(bounds.x, bounds.xMax, output.x); output.y = Mathf.Lerp(bounds.yMax, bounds.y, output.y); return output; } Vector3 CurveTangentToCanvas(float tangent) { if (!float.IsInfinity(tangent)) { var bounds = settings.bounds; float ratio = (m_CurveArea.width / m_CurveArea.height) / ((bounds.xMax - bounds.x) / (bounds.yMax - bounds.y)); return new Vector3(1f, -tangent / ratio).normalized; } return float.IsPositiveInfinity(tangent) ?
Vector3.up : Vector3.down; } Vector3[] BezierSegment(Keyframe start, Keyframe end) { var segment = new Vector3[4]; segment[0] = CurveToCanvas(new Vector3(start.time, start.value)); segment[3] = CurveToCanvas(new Vector3(end.time, end.value)); float middle = start.time + ((end.time - start.time) * 0.333333f); float middle2 = start.time + ((end.time - start.time) * 0.666666f); segment[1] = CurveToCanvas(new Vector3(middle, ProjectTangent(start.time, start.value, start.outTangent, middle))); segment[2] = CurveToCanvas(new Vector3(middle2, ProjectTangent(end.time, end.value, end.inTangent, middle2))); return segment; } Vector3[] HardSegment(Keyframe start, Keyframe end) { var segment = new Vector3[3]; segment[0] = CurveToCanvas(start); segment[1] = CurveToCanvas(new Vector3(end.time, start.value)); segment[2] = CurveToCanvas(end); return segment; } float ProjectTangent(float inPosition, float inValue, float inTangent, float projPosition) { return inValue + ((projPosition - inPosition) * inTangent); } float EvaluateTangent(AnimationCurve curve, float time) { int prev = -1, next = 0; for (int i = 0; i < curve.keys.Length; i++) { if (time > curve.keys[i].time) { prev = i; next = i + 1; } else break; } if (next == 0) return 0f; if (prev == curve.keys.Length - 1) return 0f; const float kD = 1e-3f; float tp = Mathf.Max(time - kD, curve.keys[prev].time); float tn = Mathf.Min(time + kD, curve.keys[next].time); float vp = curve.Evaluate(tp); float vn = curve.Evaluate(tn); if (Mathf.Approximately(tn, tp)) return (vn - vp > 0f) ? float.PositiveInfinity : float.NegativeInfinity; return (vn - vp) / (tn - tp); }
#endregion
} }
860
orrb
openai
C#
using System; using System.Collections.Generic; using System.Linq; using UnityEngine; using UnityEngine.Assertions; using UnityEngine.Rendering.PostProcessing; namespace UnityEditor.Rendering.PostProcessing { public static class EditorUtilities { static Dictionary<string, GUIContent> s_GUIContentCache; static Dictionary<Type, AttributeDecorator> s_AttributeDecorators; static PostProcessEffectSettings s_ClipboardContent; static EditorUtilities() { s_GUIContentCache = new Dictionary<string, GUIContent>(); s_AttributeDecorators = new Dictionary<Type, AttributeDecorator>(); ReloadDecoratorTypes(); } [Callbacks.DidReloadScripts] static void OnEditorReload() { ReloadDecoratorTypes(); } static void ReloadDecoratorTypes() { s_AttributeDecorators.Clear(); // Look for all the valid attribute decorators var types = RuntimeUtilities.GetAllAssemblyTypes() .Where( t => t.IsSubclassOf(typeof(AttributeDecorator)) && t.IsDefined(typeof(DecoratorAttribute), false) && !t.IsAbstract ); // Store them foreach (var type in types) { var attr = type.GetAttribute<DecoratorAttribute>(); var decorator = (AttributeDecorator)Activator.CreateInstance(type); s_AttributeDecorators.Add(attr.attributeType, decorator); } } internal static AttributeDecorator GetDecorator(Type attributeType) { AttributeDecorator decorator; return !s_AttributeDecorators.TryGetValue(attributeType, out decorator) ? 
null : decorator; } public static GUIContent GetContent(string textAndTooltip) { if (string.IsNullOrEmpty(textAndTooltip)) return GUIContent.none; GUIContent content; if (!s_GUIContentCache.TryGetValue(textAndTooltip, out content)) { var s = textAndTooltip.Split('|'); content = new GUIContent(s[0]); if (s.Length > 1 && !string.IsNullOrEmpty(s[1])) content.tooltip = s[1]; s_GUIContentCache.Add(textAndTooltip, content); } return content; } public static void DrawFixMeBox(string text, Action action) { Assert.IsNotNull(action); EditorGUILayout.HelpBox(text, MessageType.Warning); GUILayout.Space(-32); using (new EditorGUILayout.HorizontalScope()) { GUILayout.FlexibleSpace(); if (GUILayout.Button("Fix", GUILayout.Width(60))) action(); GUILayout.Space(8); } GUILayout.Space(11); } public static void DrawSplitter() { var rect = GUILayoutUtility.GetRect(1f, 1f); // Splitter rect should be full-width rect.xMin = 0f; rect.width += 4f; if (Event.current.type != EventType.Repaint) return; EditorGUI.DrawRect(rect, !EditorGUIUtility.isProSkin ? 
new Color(0.6f, 0.6f, 0.6f, 1.333f) : new Color(0.12f, 0.12f, 0.12f, 1.333f)); } public static void DrawOverrideCheckbox(Rect rect, SerializedProperty property) { var oldColor = GUI.color; GUI.color = new Color(0.6f, 0.6f, 0.6f, 0.75f); property.boolValue = GUI.Toggle(rect, property.boolValue, GetContent("|Override this setting for this volume."), Styling.smallTickbox); GUI.color = oldColor; } public static void DrawHeaderLabel(string title) { EditorGUILayout.LabelField(title, Styling.labelHeader); } public static bool DrawHeader(string title, bool state) { var backgroundRect = GUILayoutUtility.GetRect(1f, 17f); var labelRect = backgroundRect; labelRect.xMin += 16f; labelRect.xMax -= 20f; var foldoutRect = backgroundRect; foldoutRect.y += 1f; foldoutRect.width = 13f; foldoutRect.height = 13f; // Background rect should be full-width backgroundRect.xMin = 0f; backgroundRect.width += 4f; // Background float backgroundTint = EditorGUIUtility.isProSkin ? 0.1f : 1f; EditorGUI.DrawRect(backgroundRect, new Color(backgroundTint, backgroundTint, backgroundTint, 0.2f)); // Title EditorGUI.LabelField(labelRect, GetContent(title), EditorStyles.boldLabel); // Active checkbox state = GUI.Toggle(foldoutRect, state, GUIContent.none, EditorStyles.foldout); var e = Event.current; if (e.type == EventType.MouseDown && backgroundRect.Contains(e.mousePosition) && e.button == 0) { state = !state; e.Use(); } return state; } public static bool DrawHeader(string title, SerializedProperty group, SerializedProperty activeField, PostProcessEffectSettings target, Action resetAction, Action removeAction) { Assert.IsNotNull(group); Assert.IsNotNull(activeField); Assert.IsNotNull(target); var backgroundRect = GUILayoutUtility.GetRect(1f, 17f); var labelRect = backgroundRect; labelRect.xMin += 16f; labelRect.xMax -= 20f; var toggleRect = backgroundRect; toggleRect.y += 2f; toggleRect.width = 13f; toggleRect.height = 13f; var menuIcon = EditorGUIUtility.isProSkin ? 
Styling.paneOptionsIconDark : Styling.paneOptionsIconLight; var menuRect = new Rect(labelRect.xMax + 4f, labelRect.y + 4f, menuIcon.width, menuIcon.height); // Background rect should be full-width backgroundRect.xMin = 0f; backgroundRect.width += 4f; // Background float backgroundTint = EditorGUIUtility.isProSkin ? 0.1f : 1f; EditorGUI.DrawRect(backgroundRect, new Color(backgroundTint, backgroundTint, backgroundTint, 0.2f)); // Title using (new EditorGUI.DisabledScope(!activeField.boolValue)) EditorGUI.LabelField(labelRect, GetContent(title), EditorStyles.boldLabel); // Active checkbox activeField.serializedObject.Update(); activeField.boolValue = GUI.Toggle(toggleRect, activeField.boolValue, GUIContent.none, Styling.smallTickbox); activeField.serializedObject.ApplyModifiedProperties(); // Dropdown menu icon GUI.DrawTexture(menuRect, menuIcon); // Handle events var e = Event.current; if (e.type == EventType.MouseDown) { if (menuRect.Contains(e.mousePosition)) { ShowHeaderContextMenu(new Vector2(menuRect.x, menuRect.yMax), target, resetAction, removeAction); e.Use(); } else if (labelRect.Contains(e.mousePosition)) { if (e.button == 0) group.isExpanded = !group.isExpanded; else ShowHeaderContextMenu(e.mousePosition, target, resetAction, removeAction); e.Use(); } } return group.isExpanded; } static void ShowHeaderContextMenu(Vector2 position, PostProcessEffectSettings target, Action resetAction, Action removeAction) { Assert.IsNotNull(resetAction); Assert.IsNotNull(removeAction); var menu = new GenericMenu(); menu.AddItem(GetContent("Reset"), false, () => resetAction()); menu.AddItem(GetContent("Remove"), false, () => removeAction()); menu.AddSeparator(string.Empty); menu.AddItem(GetContent("Copy Settings"), false, () => CopySettings(target)); if (CanPaste(target)) menu.AddItem(GetContent("Paste Settings"), false, () => PasteSettings(target)); else menu.AddDisabledItem(GetContent("Paste Settings")); menu.DropDown(new Rect(position, Vector2.zero)); } static void 
CopySettings(PostProcessEffectSettings target) { Assert.IsNotNull(target); if (s_ClipboardContent != null) { RuntimeUtilities.Destroy(s_ClipboardContent); s_ClipboardContent = null; } s_ClipboardContent = (PostProcessEffectSettings)ScriptableObject.CreateInstance(target.GetType()); EditorUtility.CopySerializedIfDifferent(target, s_ClipboardContent); } static void PasteSettings(PostProcessEffectSettings target) { Assert.IsNotNull(target); Assert.IsNotNull(s_ClipboardContent); Assert.AreEqual(s_ClipboardContent.GetType(), target.GetType()); Undo.RecordObject(target, "Paste Settings"); EditorUtility.CopySerializedIfDifferent(s_ClipboardContent, target); } static bool CanPaste(PostProcessEffectSettings target) { return s_ClipboardContent != null && s_ClipboardContent.GetType() == target.GetType(); } } }
282
orrb
openai
C#
using UnityEngine; using UnityEngine.Rendering.PostProcessing; namespace UnityEditor.Rendering.PostProcessing { static class GlobalSettings { static class Keys { internal const string trackballSensitivity = "PostProcessing.Trackball.Sensitivity"; internal const string volumeGizmoColor = "PostProcessing.Volume.GizmoColor"; internal const string currentChannelMixer = "PostProcessing.ChannelMixer.CurrentChannel"; internal const string currentCurve = "PostProcessing.Curve.Current"; } static bool m_Loaded = false; static float m_TrackballSensitivity = 0.2f; internal static float trackballSensitivity { get { return m_TrackballSensitivity; } set { TrySave(ref m_TrackballSensitivity, value, Keys.trackballSensitivity); } } static Color m_VolumeGizmoColor = new Color(0.2f, 0.8f, 0.1f, 0.5f); internal static Color volumeGizmoColor { get { return m_VolumeGizmoColor; } set { TrySave(ref m_VolumeGizmoColor, value, Keys.volumeGizmoColor); } } static int m_CurrentChannelMixer = 0; internal static int currentChannelMixer { get { return m_CurrentChannelMixer; } set { TrySave(ref m_CurrentChannelMixer, value, Keys.currentChannelMixer); } } static int m_CurrentCurve = 0; internal static int currentCurve { get { return m_CurrentCurve; } set { TrySave(ref m_CurrentCurve, value, Keys.currentCurve); } } static GlobalSettings() { Load(); } [PreferenceItem("PostProcessing")] static void PreferenceGUI() { if (!m_Loaded) Load(); EditorGUILayout.Space(); trackballSensitivity = EditorGUILayout.Slider("Trackballs Sensitivity", trackballSensitivity, 0.05f, 1f); volumeGizmoColor = EditorGUILayout.ColorField("Volume Gizmo Color", volumeGizmoColor); } static void Load() { m_TrackballSensitivity = EditorPrefs.GetFloat(Keys.trackballSensitivity, 0.2f); m_VolumeGizmoColor = GetColor(Keys.volumeGizmoColor, new Color(0.2f, 0.8f, 0.1f, 0.5f)); m_CurrentChannelMixer = EditorPrefs.GetInt(Keys.currentChannelMixer, 0); m_CurrentCurve = EditorPrefs.GetInt(Keys.currentCurve, 0); m_Loaded = true; } static Color 
GetColor(string key, Color defaultValue) { int value = EditorPrefs.GetInt(key, (int)ColorUtilities.ToHex(defaultValue)); return ColorUtilities.ToRGBA((uint)value); } static void TrySave<T>(ref T field, T newValue, string key) { if (field.Equals(newValue)) return; if (typeof(T) == typeof(float)) EditorPrefs.SetFloat(key, (float)(object)newValue); else if (typeof(T) == typeof(int)) EditorPrefs.SetInt(key, (int)(object)newValue); else if (typeof(T) == typeof(bool)) EditorPrefs.SetBool(key, (bool)(object)newValue); else if (typeof(T) == typeof(string)) EditorPrefs.SetString(key, (string)(object)newValue); else if (typeof(T) == typeof(Color)) EditorPrefs.SetInt(key, (int)ColorUtilities.ToHex((Color)(object)newValue)); field = newValue; } } }
99
orrb
openai
C#
using System; using System.Linq; namespace UnityEditor.Rendering.PostProcessing { public sealed class SerializedParameterOverride { public SerializedProperty overrideState { get; private set; } public SerializedProperty value { get; private set; } public Attribute[] attributes { get; private set; } internal SerializedProperty baseProperty; public string displayName { get { return baseProperty.displayName; } } internal SerializedParameterOverride(SerializedProperty property, Attribute[] attributes) { baseProperty = property.Copy(); var localCopy = baseProperty.Copy(); localCopy.Next(true); overrideState = localCopy.Copy(); localCopy.Next(false); value = localCopy.Copy(); this.attributes = attributes; } public T GetAttribute<T>() where T : Attribute { return (T)attributes.FirstOrDefault(x => x is T); } } }
39
orrb
openai
C#
using UnityEngine; using UnityEngine.Rendering.PostProcessing; namespace UnityEditor.Rendering.PostProcessing { public static class Styling { public static readonly GUIStyle smallTickbox; public static readonly GUIStyle miniLabelButton; public static readonly Texture2D paneOptionsIconDark; public static readonly Texture2D paneOptionsIconLight; public static readonly GUIStyle labelHeader; public static readonly GUIStyle wheelLabel; public static readonly GUIStyle wheelThumb; public static readonly Vector2 wheelThumbSize; public static readonly GUIStyle preLabel; static Styling() { smallTickbox = new GUIStyle("ShurikenCheckMark"); miniLabelButton = new GUIStyle(EditorStyles.miniLabel); miniLabelButton.normal = new GUIStyleState { background = RuntimeUtilities.transparentTexture, scaledBackgrounds = null, textColor = Color.grey }; var activeState = new GUIStyleState { background = RuntimeUtilities.transparentTexture, scaledBackgrounds = null, textColor = Color.white }; miniLabelButton.active = activeState; miniLabelButton.onNormal = activeState; miniLabelButton.onActive = activeState; paneOptionsIconDark = (Texture2D)EditorGUIUtility.Load("Builtin Skins/DarkSkin/Images/pane options.png"); paneOptionsIconLight = (Texture2D)EditorGUIUtility.Load("Builtin Skins/LightSkin/Images/pane options.png"); labelHeader = new GUIStyle(EditorStyles.miniLabel); wheelThumb = new GUIStyle("ColorPicker2DThumb"); wheelThumbSize = new Vector2( !Mathf.Approximately(wheelThumb.fixedWidth, 0f) ? wheelThumb.fixedWidth : wheelThumb.padding.horizontal, !Mathf.Approximately(wheelThumb.fixedHeight, 0f) ? wheelThumb.fixedHeight : wheelThumb.padding.vertical ); wheelLabel = new GUIStyle(EditorStyles.miniLabel); preLabel = new GUIStyle("ShurikenLabel"); } } }
61
orrb
openai
C#
using System; namespace UnityEngine.Rendering.PostProcessing { public abstract class ParameterOverride { public bool overrideState; internal abstract void Interp(ParameterOverride from, ParameterOverride to, float t); public abstract int GetHash(); public T GetValue<T>() { return ((ParameterOverride<T>)this).value; } // This is used in case you need to access fields/properties that can't be accessed in the // constructor of a ScriptableObject (ParameterOverride are generally declared and inited in // a PostProcessEffectSettings which is a ScriptableObject). This will be called right // after the settings object has been constructed, thus allowing previously "forbidden" // fields/properties. protected internal virtual void OnEnable() { } // Here for consistency reasons (cf. OnEnable) protected internal virtual void OnDisable() { } internal abstract void SetValue(ParameterOverride parameter); } [Serializable] public class ParameterOverride<T> : ParameterOverride { public T value; public ParameterOverride() : this(default(T), false) { } public ParameterOverride(T value) : this(value, false) { } public ParameterOverride(T value, bool overrideState) { this.value = value; this.overrideState = overrideState; } internal override void Interp(ParameterOverride from, ParameterOverride to, float t) { // Note: this isn't completely safe but it'll do fine Interp(from.GetValue<T>(), to.GetValue<T>(), t); } public virtual void Interp(T from, T to, float t) { // Returns `b` if `dt > 0` by default so we don't have to write overrides for bools and // enumerations. value = t > 0f ? 
to : from; } public void Override(T x) { overrideState = true; value = x; } internal override void SetValue(ParameterOverride parameter) { value = parameter.GetValue<T>(); } public override int GetHash() { unchecked { int hash = 17; hash = hash * 23 + overrideState.GetHashCode(); hash = hash * 23 + value.GetHashCode(); return hash; } } // Implicit conversion; assuming the following: // // var myFloatProperty = new ParameterOverride<float> { value = 42f; }; // // It allows for implicit casts: // // float myFloat = myFloatProperty.value; // No implicit cast // float myFloat = myFloatProperty; // Implicit cast // // For safety reason this is one-way only. public static implicit operator T(ParameterOverride<T> prop) { return prop.value; } } // Bypassing the limited unity serialization system... [Serializable] public sealed class FloatParameter : ParameterOverride<float> { public override void Interp(float from, float to, float t) { value = from + (to - from) * t; } } [Serializable] public sealed class IntParameter : ParameterOverride<int> { public override void Interp(int from, int to, float t) { // Int snapping interpolation. Don't use this for enums as they don't necessarily have // contiguous values. Use the default interpolator instead (same as bool). value = (int)(from + (to - from) * t); } } [Serializable] public sealed class BoolParameter : ParameterOverride<bool> {} [Serializable] public sealed class ColorParameter : ParameterOverride<Color> { public override void Interp(Color from, Color to, float t) { // Lerping color values is a sensitive subject... We looked into lerping colors using // HSV and LCH but they have some downsides that make them not work correctly in all // situations, so we stick with RGB lerping for now, at least its behavior is // predictable despite looking desaturated when `t ~= 0.5` and it's faster anyway. 
value.r = from.r + (to.r - from.r) * t; value.g = from.g + (to.g - from.g) * t; value.b = from.b + (to.b - from.b) * t; value.a = from.a + (to.a - from.a) * t; } } [Serializable] public sealed class Vector2Parameter : ParameterOverride<Vector2> { public override void Interp(Vector2 from, Vector2 to, float t) { value.x = from.x + (to.x - from.x) * t; value.y = from.y + (to.y - from.y) * t; } } [Serializable] public sealed class Vector3Parameter : ParameterOverride<Vector3> { public override void Interp(Vector3 from, Vector3 to, float t) { value.x = from.x + (to.x - from.x) * t; value.y = from.y + (to.y - from.y) * t; value.z = from.z + (to.z - from.z) * t; } } [Serializable] public sealed class Vector4Parameter : ParameterOverride<Vector4> { public override void Interp(Vector4 from, Vector4 to, float t) { value.x = from.x + (to.x - from.x) * t; value.y = from.y + (to.y - from.y) * t; value.z = from.z + (to.z - from.z) * t; value.w = from.w + (to.w - from.w) * t; } } [Serializable] public sealed class SplineParameter : ParameterOverride<Spline> { protected internal override void OnEnable() { if (value != null) value.Cache(int.MinValue); } public override void Interp(Spline from, Spline to, float t) { int frameCount = Time.renderedFrameCount; if (from == null || to == null) { base.Interp(from, to, t); return; } from.Cache(frameCount); to.Cache(frameCount); for (int i = 0; i < Spline.k_Precision; i++) { float a = from.cachedData[i]; float b = to.cachedData[i]; value.cachedData[i] = a + (b - a) * t; } } } [Serializable] public sealed class TextureParameter : ParameterOverride<Texture> { public override void Interp(Texture from, Texture to, float t) { if (from == null || to == null) { base.Interp(from, to, t); return; } value = TextureLerper.instance.Lerp(from, to, t); } } }
226
orrb
openai
C#
using System; using UnityEngine.Assertions; namespace UnityEngine.Rendering.PostProcessing { public sealed class PostProcessBundle { public PostProcessAttribute attribute { get; private set; } public PostProcessEffectSettings settings { get; private set; } internal PostProcessEffectRenderer renderer { get { if (m_Renderer == null) { Assert.IsNotNull(attribute.renderer); var rendererType = attribute.renderer; m_Renderer = (PostProcessEffectRenderer)Activator.CreateInstance(rendererType); m_Renderer.SetSettings(settings); m_Renderer.Init(); } return m_Renderer; } } PostProcessEffectRenderer m_Renderer; internal PostProcessBundle(PostProcessEffectSettings settings) { // If settings is null, it means that at some point a null element has been added to // the volume effect list or there was a deserialization error and a reference to // the settings scriptableobject was lost Assert.IsNotNull(settings); this.settings = settings; attribute = settings.GetType().GetAttribute<PostProcessAttribute>(); } internal void Release() { if (m_Renderer != null) m_Renderer.Release(); RuntimeUtilities.Destroy(settings); } internal void ResetHistory() { if (m_Renderer != null) m_Renderer.ResetHistory(); } internal T CastSettings<T>() where T : PostProcessEffectSettings { return (T)settings; } internal T CastRenderer<T>() where T : PostProcessEffectRenderer { return (T)renderer; } } }
67
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing { [ExecuteInEditMode] [AddComponentMenu("Rendering/Post-process Debug", 1002)] public sealed class PostProcessDebug : MonoBehaviour { public PostProcessLayer postProcessLayer; PostProcessLayer m_PreviousPostProcessLayer; public bool lightMeter; public bool histogram; public bool waveform; public bool vectorscope; public DebugOverlay debugOverlay = DebugOverlay.None; Camera m_CurrentCamera; CommandBuffer m_CmdAfterEverything; void OnEnable() { m_CmdAfterEverything = new CommandBuffer { name = "Post-processing Debug Overlay" }; #if UNITY_EDITOR // Update is only called on object change when ExecuteInEditMode is set, but we need it // to execute on every frame no matter what when not in play mode, so we'll use the // editor update loop instead... UnityEditor.EditorApplication.update += UpdateStates; #endif } void OnDisable() { #if UNITY_EDITOR UnityEditor.EditorApplication.update -= UpdateStates; #endif if (m_CurrentCamera != null) m_CurrentCamera.RemoveCommandBuffer(CameraEvent.AfterImageEffects, m_CmdAfterEverything); m_CurrentCamera = null; m_PreviousPostProcessLayer = null; } #if !UNITY_EDITOR void Update() { UpdateStates(); } #endif void Reset() { postProcessLayer = GetComponent<PostProcessLayer>(); } void UpdateStates() { if (m_PreviousPostProcessLayer != postProcessLayer) { // Remove cmdbuffer from previously set camera if (m_CurrentCamera != null) { m_CurrentCamera.RemoveCommandBuffer(CameraEvent.AfterImageEffects, m_CmdAfterEverything); m_CurrentCamera = null; } m_PreviousPostProcessLayer = postProcessLayer; // Add cmdbuffer to the currently set camera if (postProcessLayer != null) { m_CurrentCamera = postProcessLayer.GetComponent<Camera>(); m_CurrentCamera.AddCommandBuffer(CameraEvent.AfterImageEffects, m_CmdAfterEverything); } } if (postProcessLayer == null || !postProcessLayer.enabled) return; // Monitors if (lightMeter) postProcessLayer.debugLayer.RequestMonitorPass(MonitorType.LightMeter); if (histogram) 
postProcessLayer.debugLayer.RequestMonitorPass(MonitorType.Histogram); if (waveform) postProcessLayer.debugLayer.RequestMonitorPass(MonitorType.Waveform); if (vectorscope) postProcessLayer.debugLayer.RequestMonitorPass(MonitorType.Vectorscope); // Overlay postProcessLayer.debugLayer.RequestDebugOverlay(debugOverlay); } void OnPostRender() { m_CmdAfterEverything.Clear(); if (postProcessLayer == null || !postProcessLayer.enabled || !postProcessLayer.debugLayer.debugOverlayActive) return; m_CmdAfterEverything.Blit(postProcessLayer.debugLayer.debugOverlayTarget, BuiltinRenderTextureType.CameraTarget); } void OnGUI() { if (postProcessLayer == null || !postProcessLayer.enabled) return; var rect = new Rect(5, 5, 0, 0); var debugLayer = postProcessLayer.debugLayer; DrawMonitor(ref rect, debugLayer.lightMeter, lightMeter); DrawMonitor(ref rect, debugLayer.histogram, histogram); DrawMonitor(ref rect, debugLayer.waveform, waveform); DrawMonitor(ref rect, debugLayer.vectorscope, vectorscope); } void DrawMonitor(ref Rect rect, Monitor monitor, bool enabled) { if (!enabled || monitor.output == null) return; rect.width = monitor.output.width; rect.height = monitor.output.height; GUI.DrawTexture(rect, monitor.output); rect.x += monitor.output.width + 5f; } } }
126
orrb
openai
C#
using System; using System.Collections.Generic; namespace UnityEngine.Rendering.PostProcessing { public enum DebugOverlay { None, Depth, Normals, MotionVectors, NANTracker, ColorBlindnessSimulation, _, AmbientOcclusion, BloomBuffer, BloomThreshold, DepthOfField } public enum ColorBlindnessType { Deuteranopia, Protanopia, Tritanopia } [Serializable] public sealed class PostProcessDebugLayer { // Monitors public LightMeterMonitor lightMeter; public HistogramMonitor histogram; public WaveformMonitor waveform; public VectorscopeMonitor vectorscope; Dictionary<MonitorType, Monitor> m_Monitors; // Current frame size int frameWidth; int frameHeight; public RenderTexture debugOverlayTarget { get; private set; } // Set to true if the frame that was just drawn as a debug overlay enabled and rendered public bool debugOverlayActive { get; private set; } // This is reset to None after rendering of post-processing has finished public DebugOverlay debugOverlay { get; private set; } // Overlay settings in a separate class to keep things separated [Serializable] public class OverlaySettings { [Range(0f, 16f)] public float motionColorIntensity = 4f; [Range(4, 128)] public int motionGridSize = 64; public ColorBlindnessType colorBlindnessType = ColorBlindnessType.Deuteranopia; [Range(0f, 1f)] public float colorBlindnessStrength = 1f; } public OverlaySettings overlaySettings; internal void OnEnable() { RuntimeUtilities.CreateIfNull(ref lightMeter); RuntimeUtilities.CreateIfNull(ref histogram); RuntimeUtilities.CreateIfNull(ref waveform); RuntimeUtilities.CreateIfNull(ref vectorscope); RuntimeUtilities.CreateIfNull(ref overlaySettings); m_Monitors = new Dictionary<MonitorType, Monitor> { { MonitorType.LightMeter, lightMeter }, { MonitorType.Histogram, histogram }, { MonitorType.Waveform, waveform }, { MonitorType.Vectorscope, vectorscope } }; foreach (var kvp in m_Monitors) kvp.Value.OnEnable(); } internal void OnDisable() { foreach (var kvp in m_Monitors) kvp.Value.OnDisable(); 
DestroyDebugOverlayTarget(); } void DestroyDebugOverlayTarget() { RuntimeUtilities.Destroy(debugOverlayTarget); debugOverlayTarget = null; } // Per-frame requests public void RequestMonitorPass(MonitorType monitor) { m_Monitors[monitor].requested = true; } public void RequestDebugOverlay(DebugOverlay mode) { debugOverlay = mode; } // Sets the current frame size - used to make sure the debug overlay target is always the // correct size - mostly useful in the editor as the user can easily resize the gameview. internal void SetFrameSize(int width, int height) { frameWidth = width; frameHeight = height; debugOverlayActive = false; } // Blits to the debug target and mark this frame as using a debug overlay public void PushDebugOverlay(CommandBuffer cmd, RenderTargetIdentifier source, PropertySheet sheet, int pass) { if (debugOverlayTarget == null || !debugOverlayTarget.IsCreated() || debugOverlayTarget.width != frameWidth || debugOverlayTarget.height != frameHeight) { RuntimeUtilities.Destroy(debugOverlayTarget); debugOverlayTarget = new RenderTexture(frameWidth, frameHeight, 0, RenderTextureFormat.ARGB32) { name = "Debug Overlay Target", anisoLevel = 1, filterMode = FilterMode.Bilinear, wrapMode = TextureWrapMode.Clamp, hideFlags = HideFlags.HideAndDontSave }; debugOverlayTarget.Create(); } cmd.BlitFullscreenTriangle(source, debugOverlayTarget, sheet, pass); debugOverlayActive = true; } internal DepthTextureMode GetCameraFlags() { if (debugOverlay == DebugOverlay.Depth) return DepthTextureMode.Depth; if (debugOverlay == DebugOverlay.Normals) return DepthTextureMode.DepthNormals; if (debugOverlay == DebugOverlay.MotionVectors) return DepthTextureMode.MotionVectors | DepthTextureMode.Depth; return DepthTextureMode.None; } internal void RenderMonitors(PostProcessRenderContext context) { // Monitors bool anyActive = false; bool needsHalfRes = false; foreach (var kvp in m_Monitors) { bool active = kvp.Value.IsRequestedAndSupported(); anyActive |= active; needsHalfRes |= 
active && kvp.Value.NeedsHalfRes(); } if (!anyActive) return; var cmd = context.command; cmd.BeginSample("Monitors"); if (needsHalfRes) { cmd.GetTemporaryRT(ShaderIDs.HalfResFinalCopy, context.width / 2, context.height / 2, 0, FilterMode.Bilinear, context.sourceFormat); cmd.Blit(context.destination, ShaderIDs.HalfResFinalCopy); } foreach (var kvp in m_Monitors) { var monitor = kvp.Value; if (monitor.requested) monitor.Render(context); } if (needsHalfRes) cmd.ReleaseTemporaryRT(ShaderIDs.HalfResFinalCopy); cmd.EndSample("Monitors"); } internal void RenderSpecialOverlays(PostProcessRenderContext context) { if (debugOverlay == DebugOverlay.Depth) { var sheet = context.propertySheets.Get(context.resources.shaders.debugOverlays); PushDebugOverlay(context.command, BuiltinRenderTextureType.None, sheet, 0); } else if (debugOverlay == DebugOverlay.Normals) { var sheet = context.propertySheets.Get(context.resources.shaders.debugOverlays); sheet.ClearKeywords(); if (context.camera.actualRenderingPath == RenderingPath.DeferredLighting) sheet.EnableKeyword("SOURCE_GBUFFER"); PushDebugOverlay(context.command, BuiltinRenderTextureType.None, sheet, 1); } else if (debugOverlay == DebugOverlay.MotionVectors) { var sheet = context.propertySheets.Get(context.resources.shaders.debugOverlays); sheet.properties.SetVector(ShaderIDs.Params, new Vector4(overlaySettings.motionColorIntensity, overlaySettings.motionGridSize, 0f, 0f)); PushDebugOverlay(context.command, context.source, sheet, 2); } else if (debugOverlay == DebugOverlay.NANTracker) { var sheet = context.propertySheets.Get(context.resources.shaders.debugOverlays); PushDebugOverlay(context.command, context.source, sheet, 3); } else if (debugOverlay == DebugOverlay.ColorBlindnessSimulation) { var sheet = context.propertySheets.Get(context.resources.shaders.debugOverlays); sheet.properties.SetVector(ShaderIDs.Params, new Vector4(overlaySettings.colorBlindnessStrength, 0f, 0f, 0f)); PushDebugOverlay(context.command, context.source, 
sheet, 4 + (int)overlaySettings.colorBlindnessType); } } internal void EndFrame() { foreach (var kvp in m_Monitors) kvp.Value.requested = false; if (!debugOverlayActive) DestroyDebugOverlayTarget(); debugOverlay = DebugOverlay.None; } } }
246
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing { public abstract class PostProcessEffectRenderer { protected bool m_ResetHistory = true; // Called when the renderer is created. Settings will be set before `Init` is called. public virtual void Init() { } // Unused with scriptable render pipelines public virtual DepthTextureMode GetCameraFlags() { return DepthTextureMode.None; } public virtual void ResetHistory() { m_ResetHistory = true; } public virtual void Release() { ResetHistory(); } public abstract void Render(PostProcessRenderContext context); internal abstract void SetSettings(PostProcessEffectSettings settings); } public abstract class PostProcessEffectRenderer<T> : PostProcessEffectRenderer where T : PostProcessEffectSettings { public T settings { get; internal set; } internal override void SetSettings(PostProcessEffectSettings settings) { this.settings = (T)settings; } } }
44
orrb
openai
C#
using System; using System.Collections.ObjectModel; using System.Reflection; using System.Linq; namespace UnityEngine.Rendering.PostProcessing { [Serializable] public class PostProcessEffectSettings : ScriptableObject { // Used to control the state of this override - handy to quickly turn a volume override // on & off in the editor public bool active = true; // This is the true state of the effect override in the stack - so you can disable a lower // priority effect by pushing a higher priority effect with enabled set to false. public BoolParameter enabled = new BoolParameter { overrideState = true, value = false }; internal ReadOnlyCollection<ParameterOverride> parameters; void OnEnable() { // Automatically grab all fields of type ParameterOverride for this instance parameters = GetType() .GetFields(BindingFlags.Public | BindingFlags.Instance) .Where(t => t.FieldType.IsSubclassOf(typeof(ParameterOverride))) .OrderBy(t => t.MetadataToken) // Guaranteed order .Select(t => (ParameterOverride)t.GetValue(this)) .ToList() .AsReadOnly(); foreach (var parameter in parameters) parameter.OnEnable(); } void OnDisable() { if (parameters == null) return; foreach (var parameter in parameters) parameter.OnDisable(); } public void SetAllOverridesTo(bool state, bool excludeEnabled = true) { foreach (var prop in parameters) { if (excludeEnabled && prop == enabled) continue; prop.overrideState = state; } } public virtual bool IsEnabledAndSupported(PostProcessRenderContext context) { return enabled.value; } // Custom hashing function used to compare the state of settings (it's not meant to be // unique but to be a quick way to check if two setting sets have the same state or not). // Hash collision rate should be pretty low. public int GetHash() { unchecked { //return parameters.Aggregate(17, (i, p) => i * 23 + p.GetHash()); int hash = 17; foreach (var p in parameters) hash = hash * 23 + p.GetHash(); return hash; } } } }
80
orrb
openai
C#
using System.Collections.Generic;

namespace UnityEngine.Rendering.PostProcessing
{
    // Injection points for custom effects relative to the built-in stack.
    public enum PostProcessEvent
    {
        BeforeTransparent = 0,
        BeforeStack = 1,
        AfterStack = 2,
    }

    // Box free comparer for our `PostProcessEvent` enum, else the runtime will box the type when
    // used as a key in a dictionary, thus leading to garbage generation... *sigh*
    public struct PostProcessEventComparer : IEqualityComparer<PostProcessEvent>
    {
        public bool Equals(PostProcessEvent x, PostProcessEvent y) => x == y;

        public int GetHashCode(PostProcessEvent obj) => (int)obj;
    }
}
27
orrb
openai
C#
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
#if UNITY_2017_2_OR_NEWER
    using XRSettings = UnityEngine.XR.XRSettings;
#elif UNITY_5_6_OR_NEWER
    using XRSettings = UnityEngine.VR.VRSettings;
#endif

    // TODO: XMLDoc everything (?)
    //
    // Per-camera post-processing driver. Gathers volume-blended settings, schedules the
    // built-in and custom effects into command buffers (legacy render loop) or renders
    // them on demand (scriptable render pipelines).
    [DisallowMultipleComponent, ExecuteInEditMode, ImageEffectAllowedInSceneView]
    [AddComponentMenu("Rendering/Post-process Layer", 1000)]
    [RequireComponent(typeof(Camera))]
    public sealed class PostProcessLayer : MonoBehaviour
    {
        public enum Antialiasing
        {
            None,
            FastApproximateAntialiasing,
            SubpixelMorphologicalAntialiasing,
            TemporalAntialiasing
        }

        // Settings
        public Transform volumeTrigger;
        public LayerMask volumeLayer;
        public bool stopNaNPropagation = true;

        // Builtins / hardcoded effects that don't benefit from volume blending
        public Antialiasing antialiasingMode = Antialiasing.None;
        public TemporalAntialiasing temporalAntialiasing;
        public SubpixelMorphologicalAntialiasing subpixelMorphologicalAntialiasing;
        public FastApproximateAntialiasing fastApproximateAntialiasing;
        public Fog fog;
        public Dithering dithering;

        public PostProcessDebugLayer debugLayer;

        [SerializeField]
        PostProcessResources m_Resources;

        // UI states
        [SerializeField] bool m_ShowToolkit;
        [SerializeField] bool m_ShowCustomSorter;

        // Will stop applying post-processing effects just before color grading is applied
        // Currently used to export to exr without color grading
        public bool breakBeforeColorGrading = false;

        // Pre-ordered custom user effects
        // These are automatically populated and made to work properly with the serialization
        // system AND the editor. Modify at your own risk.
        [Serializable]
        public sealed class SerializedBundleRef
        {
            // We can't serialize Type so use assemblyQualifiedName instead, we only need this at
            // init time anyway so it's fine
            public string assemblyQualifiedName;

            // Not serialized, is set/reset when deserialization kicks in
            public PostProcessBundle bundle;
        }

        [SerializeField] List<SerializedBundleRef> m_BeforeTransparentBundles;
        [SerializeField] List<SerializedBundleRef> m_BeforeStackBundles;
        [SerializeField] List<SerializedBundleRef> m_AfterStackBundles;

        public Dictionary<PostProcessEvent, List<SerializedBundleRef>> sortedBundles { get; private set; }

        // We need to keep track of bundle initialization because for some obscure reason, on
        // assembly reload a MonoBehavior's Editor OnEnable will be called BEFORE the MonoBehavior's
        // own OnEnable... So we'll use it to pre-init bundles if the layer inspector is opened and
        // the component hasn't been enabled yet.
        public bool haveBundlesBeenInited { get; private set; }

        // Settings/Renderer bundles mapped to settings types
        Dictionary<Type, PostProcessBundle> m_Bundles;

        PropertySheetFactory m_PropertySheetFactory;
        CommandBuffer m_LegacyCmdBufferBeforeReflections;
        CommandBuffer m_LegacyCmdBufferBeforeLighting;
        CommandBuffer m_LegacyCmdBufferOpaque;
        CommandBuffer m_LegacyCmdBuffer;
        Camera m_Camera;
        PostProcessRenderContext m_CurrentContext;
        LogHistogram m_LogHistogram;

        bool m_SettingsUpdateNeeded = true;
        bool m_IsRenderingInSceneView = false;

        TargetPool m_TargetPool;

        bool m_NaNKilled = false;

        // Recycled list - used to reduce GC stress when gathering active effects in a bundle list
        // on each frame
        readonly List<PostProcessEffectRenderer> m_ActiveEffects = new List<PostProcessEffectRenderer>();
        readonly List<RenderTargetIdentifier> m_Targets = new List<RenderTargetIdentifier>();

        void OnEnable()
        {
            Init(null);

            if (!haveBundlesBeenInited)
                InitBundles();

            m_LogHistogram = new LogHistogram();
            m_PropertySheetFactory = new PropertySheetFactory();
            m_TargetPool = new TargetPool();

            debugLayer.OnEnable();

            // Command buffers are only needed for the legacy render loop
            if (RuntimeUtilities.scriptableRenderPipelineActive)
                return;

            InitLegacy();
        }

        // Creates and attaches the command buffers used by the legacy (non-SRP) render loop.
        void InitLegacy()
        {
            m_LegacyCmdBufferBeforeReflections = new CommandBuffer { name = "Deferred Ambient Occlusion" };
            m_LegacyCmdBufferBeforeLighting = new CommandBuffer { name = "Deferred Ambient Occlusion" };
            m_LegacyCmdBufferOpaque = new CommandBuffer { name = "Opaque Only Post-processing" };
            m_LegacyCmdBuffer = new CommandBuffer { name = "Post-processing" };

            m_Camera = GetComponent<Camera>();
            m_Camera.forceIntoRenderTexture = true; // Needed when running Forward / LDR / No MSAA
            m_Camera.AddCommandBuffer(CameraEvent.BeforeReflections, m_LegacyCmdBufferBeforeReflections);
            m_Camera.AddCommandBuffer(CameraEvent.BeforeLighting, m_LegacyCmdBufferBeforeLighting);
            m_Camera.AddCommandBuffer(CameraEvent.BeforeImageEffectsOpaque, m_LegacyCmdBufferOpaque);
            m_Camera.AddCommandBuffer(CameraEvent.BeforeImageEffects, m_LegacyCmdBuffer);

            // Internal context used if no SRP is set
            m_CurrentContext = new PostProcessRenderContext();
        }

        // Ensures all hardcoded effect instances exist; optionally overrides the resource asset.
        public void Init(PostProcessResources resources)
        {
            if (resources != null)
                m_Resources = resources;

            RuntimeUtilities.CreateIfNull(ref temporalAntialiasing);
            RuntimeUtilities.CreateIfNull(ref subpixelMorphologicalAntialiasing);
            RuntimeUtilities.CreateIfNull(ref fastApproximateAntialiasing);
            RuntimeUtilities.CreateIfNull(ref dithering);
            RuntimeUtilities.CreateIfNull(ref fog);
            RuntimeUtilities.CreateIfNull(ref debugLayer);
        }

        // Builds one bundle per known effect type and refreshes the sorted injection lists.
        public void InitBundles()
        {
            if (haveBundlesBeenInited)
                return;

            // Create these lists only once, the serialization system will take over after that
            RuntimeUtilities.CreateIfNull(ref m_BeforeTransparentBundles);
            RuntimeUtilities.CreateIfNull(ref m_BeforeStackBundles);
            RuntimeUtilities.CreateIfNull(ref m_AfterStackBundles);

            // Create a bundle for each effect type
            m_Bundles = new Dictionary<Type, PostProcessBundle>();

            foreach (var type in PostProcessManager.instance.settingsTypes.Keys)
            {
                var settings = (PostProcessEffectSettings)ScriptableObject.CreateInstance(type);
                var bundle = new PostProcessBundle(settings);
                m_Bundles.Add(type, bundle);
            }

            // Update sorted lists with newly added or removed effects in the assemblies
            UpdateBundleSortList(m_BeforeTransparentBundles, PostProcessEvent.BeforeTransparent);
            UpdateBundleSortList(m_BeforeStackBundles, PostProcessEvent.BeforeStack);
            UpdateBundleSortList(m_AfterStackBundles, PostProcessEvent.AfterStack);

            // Push all sorted lists in a dictionary for easier access
            sortedBundles = new Dictionary<PostProcessEvent, List<SerializedBundleRef>>(new PostProcessEventComparer())
            {
                { PostProcessEvent.BeforeTransparent, m_BeforeTransparentBundles },
                { PostProcessEvent.BeforeStack,       m_BeforeStackBundles },
                { PostProcessEvent.AfterStack,        m_AfterStackBundles }
            };

            // Done
            haveBundlesBeenInited = true;
        }

        // Synchronizes a serialized, user-sortable effect list with the set of effect types
        // currently present in loaded assemblies for the given injection point.
        void UpdateBundleSortList(List<SerializedBundleRef> sortedList, PostProcessEvent evt)
        {
            // First get all effects associated with the injection point
            var effects = m_Bundles.Where(kvp => kvp.Value.attribute.eventType == evt && !kvp.Value.attribute.builtinEffect)
                                   .Select(kvp => kvp.Value)
                                   .ToList();

            // Remove types that don't exist anymore
            sortedList.RemoveAll(x =>
            {
                string searchStr = x.assemblyQualifiedName;
                return !effects.Exists(b => b.settings.GetType().AssemblyQualifiedName == searchStr);
            });

            // Add new ones
            foreach (var effect in effects)
            {
                string typeName = effect.settings.GetType().AssemblyQualifiedName;

                if (!sortedList.Exists(b => b.assemblyQualifiedName == typeName))
                {
                    var sbr = new SerializedBundleRef { assemblyQualifiedName = typeName };
                    sortedList.Add(sbr);
                }
            }

            // Link internal references
            foreach (var effect in sortedList)
            {
                string typeName = effect.assemblyQualifiedName;
                var bundle = effects.Find(b => b.settings.GetType().AssemblyQualifiedName == typeName);
                effect.bundle = bundle;
            }
        }

        void OnDisable()
        {
            if (!RuntimeUtilities.scriptableRenderPipelineActive)
            {
                m_Camera.RemoveCommandBuffer(CameraEvent.BeforeReflections, m_LegacyCmdBufferBeforeReflections);
                m_Camera.RemoveCommandBuffer(CameraEvent.BeforeLighting, m_LegacyCmdBufferBeforeLighting);
                m_Camera.RemoveCommandBuffer(CameraEvent.BeforeImageEffectsOpaque, m_LegacyCmdBufferOpaque);
                m_Camera.RemoveCommandBuffer(CameraEvent.BeforeImageEffects, m_LegacyCmdBuffer);
            }

            temporalAntialiasing.Release();
            m_LogHistogram.Release();

            foreach (var bundle in m_Bundles.Values)
                bundle.Release();

            m_Bundles.Clear();
            m_PropertySheetFactory.Release();

            if (debugLayer != null)
                debugLayer.OnDisable();

            // Might be an issue if several layers are blending in the same frame...
            TextureLerper.instance.Clear();

            haveBundlesBeenInited = false;
        }

        // Called everytime the user resets the component from the inspector and more importantly
        // the first time it's added to a GameObject. As we don't have added/removed event for
        // components, this will do fine
        void Reset()
        {
            volumeTrigger = transform;
        }

        void OnPreCull()
        {
            // Unused in scriptable render pipelines
            if (RuntimeUtilities.scriptableRenderPipelineActive)
                return;

            if (m_Camera == null || m_CurrentContext == null)
                InitLegacy();

            // Resets the projection matrix from previous frame in case TAA was enabled.
            // We also need to force reset the non-jittered projection matrix here as it's not done
            // when ResetProjectionMatrix() is called and will break transparent rendering if TAA
            // is switched off and the FOV or any other camera property changes.
            m_Camera.ResetProjectionMatrix();
            m_Camera.nonJitteredProjectionMatrix = m_Camera.projectionMatrix;

            if (m_Camera.stereoEnabled)
            {
                m_Camera.ResetStereoProjectionMatrices();
                Shader.SetGlobalFloat(ShaderIDs.RenderViewportScaleFactor, XRSettings.renderViewportScale);
            }
            else
            {
                Shader.SetGlobalFloat(ShaderIDs.RenderViewportScaleFactor, 1.0f);
            }

            BuildCommandBuffers();
        }

        void OnPreRender()
        {
            // Unused in scriptable render pipelines
            // Only needed for multi-pass stereo right eye
            if (RuntimeUtilities.scriptableRenderPipelineActive ||
                (m_Camera.stereoActiveEye != Camera.MonoOrStereoscopicEye.Right))
                return;

            BuildCommandBuffers();
        }

        // Rebuilds the legacy command buffers for the current frame: opaque-only effects
        // (deferred AO, SSR, fog, custom BeforeTransparent) then the main post-process stack.
        void BuildCommandBuffers()
        {
            var context = m_CurrentContext;
            var sourceFormat = m_Camera.allowHDR ? RenderTextureFormat.DefaultHDR : RenderTextureFormat.Default;

            context.Reset();
            context.camera = m_Camera;
            context.sourceFormat = sourceFormat;

            // TODO: Investigate retaining command buffers on XR multi-pass right eye
            m_LegacyCmdBufferBeforeReflections.Clear();
            m_LegacyCmdBufferBeforeLighting.Clear();
            m_LegacyCmdBufferOpaque.Clear();
            m_LegacyCmdBuffer.Clear();

            SetupContext(context);

            context.command = m_LegacyCmdBufferOpaque;
            UpdateSettingsIfNeeded(context);

            // Lighting & opaque-only effects
            var aoBundle = GetBundle<AmbientOcclusion>();
            var aoSettings = aoBundle.CastSettings<AmbientOcclusion>();
            var aoRenderer = aoBundle.CastRenderer<AmbientOcclusionRenderer>();

            bool aoSupported = aoSettings.IsEnabledAndSupported(context);
            bool aoAmbientOnly = aoRenderer.IsAmbientOnly(context);
            bool isAmbientOcclusionDeferred = aoSupported && aoAmbientOnly;
            bool isAmbientOcclusionOpaque = aoSupported && !aoAmbientOnly;

            var ssrBundle = GetBundle<ScreenSpaceReflections>();
            var ssrSettings = ssrBundle.settings;
            var ssrRenderer = ssrBundle.renderer;
            bool isScreenSpaceReflectionsActive = ssrSettings.IsEnabledAndSupported(context);

            // Ambient-only AO is a special case and has to be done in separate command buffers
            if (isAmbientOcclusionDeferred)
            {
                var ao = aoRenderer.Get();

                // Render as soon as possible - should be done async in SRPs when available
                context.command = m_LegacyCmdBufferBeforeReflections;
                ao.RenderAmbientOnly(context);

                // Composite with GBuffer right before the lighting pass
                context.command = m_LegacyCmdBufferBeforeLighting;
                ao.CompositeAmbientOnly(context);
            }
            else if (isAmbientOcclusionOpaque)
            {
                context.command = m_LegacyCmdBufferOpaque;
                aoRenderer.Get().RenderAfterOpaque(context);
            }

            bool isFogActive = fog.IsEnabledAndSupported(context);
            bool hasCustomOpaqueOnlyEffects = HasOpaqueOnlyEffects(context);
            int opaqueOnlyEffects = 0;
            opaqueOnlyEffects += isScreenSpaceReflectionsActive ? 1 : 0;
            opaqueOnlyEffects += isFogActive ? 1 : 0;
            opaqueOnlyEffects += hasCustomOpaqueOnlyEffects ? 1 : 0;

            // This works on right eye because it is resolved/populated at runtime
            var cameraTarget = new RenderTargetIdentifier(BuiltinRenderTextureType.CameraTarget);

            if (opaqueOnlyEffects > 0)
            {
                var cmd = m_LegacyCmdBufferOpaque;
                context.command = cmd;

                // We need to use the internal Blit method to copy the camera target or it'll fail
                // on tiled GPU as it won't be able to resolve
                int tempTarget0 = m_TargetPool.Get();
                context.GetScreenSpaceTemporaryRT(cmd, tempTarget0, 24, sourceFormat);
                cmd.Blit(cameraTarget, tempTarget0);
                context.source = tempTarget0;

                int tempTarget1 = -1;

                if (opaqueOnlyEffects > 1)
                {
                    // Ping-pong target; the last effect in the chain writes straight to the camera
                    tempTarget1 = m_TargetPool.Get();
                    context.GetScreenSpaceTemporaryRT(cmd, tempTarget1, 24, sourceFormat);
                    context.destination = tempTarget1;
                }
                else
                    context.destination = cameraTarget;

                if (isScreenSpaceReflectionsActive)
                {
                    ssrRenderer.Render(context);
                    opaqueOnlyEffects--;
                    var prevSource = context.source;
                    context.source = context.destination;
                    context.destination = opaqueOnlyEffects == 1 ? cameraTarget : prevSource;
                }

                if (isFogActive)
                {
                    fog.Render(context);
                    opaqueOnlyEffects--;
                    var prevSource = context.source;
                    context.source = context.destination;
                    context.destination = opaqueOnlyEffects == 1 ? cameraTarget : prevSource;
                }

                if (hasCustomOpaqueOnlyEffects)
                    RenderOpaqueOnly(context);

                if (opaqueOnlyEffects > 1)
                    cmd.ReleaseTemporaryRT(tempTarget1);

                cmd.ReleaseTemporaryRT(tempTarget0);
            }

            // Post-transparency stack
            // Same as before, first blit needs to use the builtin Blit command to properly handle
            // tiled GPUs
            int tempRt = m_TargetPool.Get();
            context.GetScreenSpaceTemporaryRT(m_LegacyCmdBuffer, tempRt, 24, sourceFormat, RenderTextureReadWrite.sRGB);
            m_LegacyCmdBuffer.Blit(cameraTarget, tempRt, RuntimeUtilities.copyStdMaterial, stopNaNPropagation ? 1 : 0);
            m_NaNKilled = stopNaNPropagation;

            context.command = m_LegacyCmdBuffer;
            context.source = tempRt;
            context.destination = cameraTarget;
            Render(context);

            m_LegacyCmdBuffer.ReleaseTemporaryRT(tempRt);
        }

        void OnPostRender()
        {
            // Unused in scriptable render pipelines
            if (RuntimeUtilities.scriptableRenderPipelineActive)
                return;

            // Undo the TAA projection jitter so everything rendered after post (UI, gizmos...)
            // uses the clean matrix
            if (m_CurrentContext.IsTemporalAntialiasingActive())
            {
                m_Camera.ResetProjectionMatrix();

                if (m_CurrentContext.stereoActive)
                {
                    if (RuntimeUtilities.isSinglePassStereoEnabled || m_Camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Right)
                        m_Camera.ResetStereoProjectionMatrices();
                }
            }
        }

        public PostProcessBundle GetBundle<T>()
            where T : PostProcessEffectSettings
        {
            return GetBundle(typeof(T));
        }

        public PostProcessBundle GetBundle(Type settingsType)
        {
            Assert.IsTrue(m_Bundles.ContainsKey(settingsType), "Invalid type");
            return m_Bundles[settingsType];
        }

        public T GetSettings<T>()
            where T : PostProcessEffectSettings
        {
            return GetBundle<T>().CastSettings<T>();
        }

        // Bakes a multi-scale volumetric obscurance map into `destination` for external use
        // (e.g. lightmapping); `depthMap` may be null to use the camera depth texture.
        public void BakeMSVOMap(CommandBuffer cmd, Camera camera, RenderTargetIdentifier destination, RenderTargetIdentifier? depthMap, bool invert)
        {
            var bundle = GetBundle<AmbientOcclusion>();
            var renderer = bundle.CastRenderer<AmbientOcclusionRenderer>().GetMultiScaleVO();
            renderer.SetResources(m_Resources);
            renderer.GenerateAOMap(cmd, camera, destination, depthMap, invert);
        }

        internal void OverrideSettings(List<PostProcessEffectSettings> baseSettings, float interpFactor)
        {
            // Go through all settings & overriden parameters for the given volume and lerp values
            foreach (var settings in baseSettings)
            {
                if (!settings.active)
                    continue;

                var target = GetBundle(settings.GetType()).settings;
                int count = settings.parameters.Count;

                for (int i = 0; i < count; i++)
                {
                    var toParam = settings.parameters[i];
                    if (toParam.overrideState)
                    {
                        var fromParam = target.parameters[i];
                        fromParam.Interp(fromParam, toParam, interpFactor);
                    }
                }
            }
        }

        // In the legacy render loop you have to explicitly set flags on camera to tell that you
        // need depth, depth+normals or motion vectors... This won't have any effect with most
        // scriptable render pipelines.
        void SetLegacyCameraFlags(PostProcessRenderContext context)
        {
            var flags = context.camera.depthTextureMode;

            foreach (var bundle in m_Bundles)
            {
                if (bundle.Value.settings.IsEnabledAndSupported(context))
                    flags |= bundle.Value.renderer.GetCameraFlags();
            }

            // Special case for AA & lighting effects
            if (context.IsTemporalAntialiasingActive())
                flags |= temporalAntialiasing.GetCameraFlags();

            if (fog.IsEnabledAndSupported(context))
                flags |= fog.GetCameraFlags();

            if (debugLayer.debugOverlay != DebugOverlay.None)
                flags |= debugLayer.GetCameraFlags();

            context.camera.depthTextureMode = flags;
        }

        // Call this function whenever you need to reset any temporal effect (TAA, Motion Blur etc).
        // Mainly used when doing camera cuts.
        public void ResetHistory()
        {
            foreach (var bundle in m_Bundles)
                bundle.Value.ResetHistory();

            temporalAntialiasing.ResetHistory();
        }

        public bool HasOpaqueOnlyEffects(PostProcessRenderContext context)
        {
            return HasActiveEffects(PostProcessEvent.BeforeTransparent, context);
        }

        public bool HasActiveEffects(PostProcessEvent evt, PostProcessRenderContext context)
        {
            var list = sortedBundles[evt];

            foreach (var item in list)
            {
                if (item.bundle.settings.IsEnabledAndSupported(context))
                    return true;
            }

            return false;
        }

        // Prepares the shared per-frame context state before any effect renders.
        void SetupContext(PostProcessRenderContext context)
        {
            m_IsRenderingInSceneView = context.camera.cameraType == CameraType.SceneView;
            context.isSceneView = m_IsRenderingInSceneView;
            context.resources = m_Resources;
            context.propertySheets = m_PropertySheetFactory;
            context.debugLayer = debugLayer;
            context.antialiasing = antialiasingMode;
            context.temporalAntialiasing = temporalAntialiasing;
            context.logHistogram = m_LogHistogram;
            SetLegacyCameraFlags(context);

            // Prepare debug overlay
            debugLayer.SetFrameSize(context.width, context.height);

            // Unsafe to keep this around but we need it for OnGUI events for debug views
            // Will be removed eventually
            m_CurrentContext = context;
        }

        // Runs volume blending once per frame; subsequent calls in the same frame are no-ops
        // until Render() flips m_SettingsUpdateNeeded back on.
        void UpdateSettingsIfNeeded(PostProcessRenderContext context)
        {
            if (m_SettingsUpdateNeeded)
            {
                context.command.BeginSample("VolumeBlending");
                PostProcessManager.instance.UpdateSettings(this);
                context.command.EndSample("VolumeBlending");
                m_TargetPool.Reset();

                // TODO: fix me once VR support is in SRP
                // Needed in SRP so that _RenderViewportScaleFactor isn't 0
                if (RuntimeUtilities.scriptableRenderPipelineActive)
                    Shader.SetGlobalFloat(ShaderIDs.RenderViewportScaleFactor, 1f);
            }

            m_SettingsUpdateNeeded = false;
        }

        // Renders before-transparent effects.
        // Make sure you check `HasOpaqueOnlyEffects()` before calling this method as it won't
        // automatically blit source into destination if no opaque effects are active.
        public void RenderOpaqueOnly(PostProcessRenderContext context)
        {
            if (RuntimeUtilities.scriptableRenderPipelineActive)
                SetupContext(context);

            TextureLerper.instance.BeginFrame(context);

            // Update & override layer settings first (volume blending), will only be done once per
            // frame, either here or in Render() if there isn't any opaque-only effect to render.
            UpdateSettingsIfNeeded(context);

            RenderList(sortedBundles[PostProcessEvent.BeforeTransparent], context, "OpaqueOnly");
        }

        // Renders everything not opaque-only
        //
        // Current order of operation is as following:
        //     1. Pre-stack
        //     2. Built-in stack
        //     3. Post-stack
        //     4. Built-in final pass
        //
        // Final pass should be skipped when outputting to a HDR display.
        public void Render(PostProcessRenderContext context)
        {
            if (RuntimeUtilities.scriptableRenderPipelineActive)
                SetupContext(context);

            TextureLerper.instance.BeginFrame(context);
            var cmd = context.command;

            // Update & override layer settings first (volume blending) if the opaque only pass
            // hasn't been called this frame.
            UpdateSettingsIfNeeded(context);

            // Do a NaN killing pass if needed
            int lastTarget = -1;
            if (stopNaNPropagation && !m_NaNKilled)
            {
                lastTarget = m_TargetPool.Get();
                context.GetScreenSpaceTemporaryRT(cmd, lastTarget, 24, context.sourceFormat);
                cmd.BlitFullscreenTriangle(context.source, lastTarget, RuntimeUtilities.copySheet, 1);
                context.source = lastTarget;
                m_NaNKilled = true;
            }

            // Do temporal anti-aliasing first
            if (context.IsTemporalAntialiasingActive())
            {
                if (!RuntimeUtilities.scriptableRenderPipelineActive)
                {
                    if (context.stereoActive)
                    {
                        // We only need to configure all of this once for stereo, during OnPreCull
                        if (context.camera.stereoActiveEye != Camera.MonoOrStereoscopicEye.Right)
                            temporalAntialiasing.ConfigureStereoJitteredProjectionMatrices(context);
                    }
                    else
                    {
                        temporalAntialiasing.ConfigureJitteredProjectionMatrix(context);
                    }
                }

                var taaTarget = m_TargetPool.Get();
                var finalDestination = context.destination;
                context.GetScreenSpaceTemporaryRT(cmd, taaTarget, 24, context.sourceFormat);
                context.destination = taaTarget;
                temporalAntialiasing.Render(context);
                context.source = taaTarget;
                context.destination = finalDestination;

                if (lastTarget > -1)
                    cmd.ReleaseTemporaryRT(lastTarget);

                lastTarget = taaTarget;
            }

            bool hasBeforeStackEffects = HasActiveEffects(PostProcessEvent.BeforeStack, context);
            bool hasAfterStackEffects = HasActiveEffects(PostProcessEvent.AfterStack, context) && !breakBeforeColorGrading;
            bool needsFinalPass = (hasAfterStackEffects
                || (antialiasingMode == Antialiasing.FastApproximateAntialiasing)
                || (antialiasingMode == Antialiasing.SubpixelMorphologicalAntialiasing && subpixelMorphologicalAntialiasing.IsSupported()))
                && !breakBeforeColorGrading;

            // Right before the builtin stack
            if (hasBeforeStackEffects)
                lastTarget = RenderInjectionPoint(PostProcessEvent.BeforeStack, context, "BeforeStack", lastTarget);

            // Builtin stack
            lastTarget = RenderBuiltins(context, !needsFinalPass, lastTarget);

            // After the builtin stack but before the final pass (before FXAA & Dithering)
            if (hasAfterStackEffects)
                lastTarget = RenderInjectionPoint(PostProcessEvent.AfterStack, context, "AfterStack", lastTarget);

            // And close with the final pass
            if (needsFinalPass)
                RenderFinalPass(context, lastTarget);

            // Render debug monitors & overlay if requested
            debugLayer.RenderSpecialOverlays(context);
            debugLayer.RenderMonitors(context);

            // End frame cleanup
            TextureLerper.instance.EndFrame();
            debugLayer.EndFrame();
            m_SettingsUpdateNeeded = true;
            m_NaNKilled = false;
        }

        // Renders one custom-effect injection point into a fresh temporary target and returns
        // its id; releases `releaseTargetAfterUse` once it has been consumed as a source.
        int RenderInjectionPoint(PostProcessEvent evt, PostProcessRenderContext context, string marker, int releaseTargetAfterUse = -1)
        {
            int tempTarget = m_TargetPool.Get();
            var finalDestination = context.destination;

            var cmd = context.command;
            context.GetScreenSpaceTemporaryRT(cmd, tempTarget, 24, context.sourceFormat);
            context.destination = tempTarget;
            RenderList(sortedBundles[evt], context, marker);
            context.source = tempTarget;
            context.destination = finalDestination;

            if (releaseTargetAfterUse > -1)
                cmd.ReleaseTemporaryRT(releaseTargetAfterUse);

            return tempTarget;
        }

        // Executes all active effects in `list` in order, ping-ponging between up to two
        // temporary targets so each effect reads the previous one's output.
        void RenderList(List<SerializedBundleRef> list, PostProcessRenderContext context, string marker)
        {
            var cmd = context.command;
            cmd.BeginSample(marker);

            // First gather active effects - we need this to manage render targets more efficiently
            m_ActiveEffects.Clear();

            for (int i = 0; i < list.Count; i++)
            {
                var effect = list[i].bundle;

                if (effect.settings.IsEnabledAndSupported(context))
                {
                    if (!context.isSceneView || (context.isSceneView && effect.attribute.allowInSceneView))
                        m_ActiveEffects.Add(effect.renderer);
                }
            }

            int count = m_ActiveEffects.Count;

            // If there's only one active effect, we can simply execute it and skip the rest
            if (count == 1)
            {
                m_ActiveEffects[0].Render(context);
            }
            else
            {
                // Else create the target chain
                m_Targets.Clear();
                m_Targets.Add(context.source); // First target is always source

                int tempTarget1 = m_TargetPool.Get();
                int tempTarget2 = m_TargetPool.Get();

                for (int i = 0; i < count - 1; i++)
                    m_Targets.Add(i % 2 == 0 ? tempTarget1 : tempTarget2);

                m_Targets.Add(context.destination); // Last target is always destination

                // Render
                context.GetScreenSpaceTemporaryRT(cmd, tempTarget1, 24, context.sourceFormat);
                if (count > 2)
                    context.GetScreenSpaceTemporaryRT(cmd, tempTarget2, 24, context.sourceFormat);

                for (int i = 0; i < count; i++)
                {
                    context.source = m_Targets[i];
                    context.destination = m_Targets[i + 1];
                    m_ActiveEffects[i].Render(context);
                }

                cmd.ReleaseTemporaryRT(tempTarget1);
                if (count > 2)
                    cmd.ReleaseTemporaryRT(tempTarget2);
            }

            cmd.EndSample(marker);
        }

        // Runs the built-in uber stack (DoF, motion blur, exposure, bloom, grading...);
        // returns the intermediate target id when this isn't the final pass (-1 otherwise).
        int RenderBuiltins(PostProcessRenderContext context, bool isFinalPass, int releaseTargetAfterUse = -1)
        {
            var uberSheet = context.propertySheets.Get(context.resources.shaders.uber);
            uberSheet.ClearKeywords();
            uberSheet.properties.Clear();
            context.uberSheet = uberSheet;
            context.autoExposureTexture = RuntimeUtilities.whiteTexture;
            context.bloomBufferNameID = -1;

            var cmd = context.command;
            cmd.BeginSample("BuiltinStack");

            int tempTarget = -1;
            var finalDestination = context.destination;

            if (!isFinalPass)
            {
                // Render to an intermediate target as this won't be the final pass
                tempTarget = m_TargetPool.Get();
                context.GetScreenSpaceTemporaryRT(cmd, tempTarget, 24, context.sourceFormat);
                context.destination = tempTarget;

                // Handle FXAA's keep alpha mode
                if (antialiasingMode == Antialiasing.FastApproximateAntialiasing && !fastApproximateAntialiasing.keepAlpha)
                    uberSheet.properties.SetFloat(ShaderIDs.LumaInAlpha, 1f);
            }

            // Depth of field final combination pass used to be done in Uber which led to artifacts
            // when used at the same time as Bloom (because both effects used the same source, so
            // the stronger bloom was, the more DoF was eaten away in out of focus areas)
            int depthOfFieldTarget = RenderEffect<DepthOfField>(context, true);

            // Motion blur is a separate pass - could potentially be done after DoF depending on the
            // kind of results you're looking for...
            int motionBlurTarget = RenderEffect<MotionBlur>(context, true);

            // Prepare exposure histogram if needed
            if (ShouldGenerateLogHistogram(context))
                m_LogHistogram.Generate(context);

            // Uber effects
            RenderEffect<AutoExposure>(context);
            uberSheet.properties.SetTexture(ShaderIDs.AutoExposureTex, context.autoExposureTexture);

            RenderEffect<ChromaticAberration>(context);
            RenderEffect<Bloom>(context);
            RenderEffect<Vignette>(context);
            RenderEffect<Grain>(context);

            if (!breakBeforeColorGrading)
                RenderEffect<ColorGrading>(context);

            int pass = 0;

            if (isFinalPass)
            {
                uberSheet.EnableKeyword("FINALPASS");
                dithering.Render(context);

                if (context.flip && !context.isSceneView)
                    pass = 1;
            }

            cmd.BlitFullscreenTriangle(context.source, context.destination, uberSheet, pass);

            context.source = context.destination;
            context.destination = finalDestination;

            if (releaseTargetAfterUse > -1) cmd.ReleaseTemporaryRT(releaseTargetAfterUse);
            if (motionBlurTarget > -1) cmd.ReleaseTemporaryRT(motionBlurTarget);
            // FIX: the original released motionBlurTarget here a second time, leaking the
            // depth-of-field temporary RT whenever DoF was active.
            if (depthOfFieldTarget > -1) cmd.ReleaseTemporaryRT(depthOfFieldTarget);
            if (context.bloomBufferNameID > -1) cmd.ReleaseTemporaryRT(context.bloomBufferNameID);

            cmd.EndSample("BuiltinStack");

            return tempTarget;
        }

        // This pass will have to be disabled for HDR screen output as it's an LDR pass
        void RenderFinalPass(PostProcessRenderContext context, int releaseTargetAfterUse = -1)
        {
            var cmd = context.command;
            cmd.BeginSample("FinalPass");

            if (breakBeforeColorGrading)
            {
                var sheet = context.propertySheets.Get(context.resources.shaders.discardAlpha);
                cmd.BlitFullscreenTriangle(context.source, context.destination, sheet, 0);
            }
            else
            {
                var uberSheet = context.propertySheets.Get(context.resources.shaders.finalPass);
                uberSheet.ClearKeywords();
                uberSheet.properties.Clear();
                context.uberSheet = uberSheet;
                int tempTarget = -1;

                if (antialiasingMode == Antialiasing.FastApproximateAntialiasing)
                {
                    uberSheet.EnableKeyword(fastApproximateAntialiasing.fastMode
                        ? "FXAA_LOW"
                        : "FXAA"
                    );

                    if (fastApproximateAntialiasing.keepAlpha)
                        uberSheet.EnableKeyword("FXAA_KEEP_ALPHA");
                }
                else if (antialiasingMode == Antialiasing.SubpixelMorphologicalAntialiasing && subpixelMorphologicalAntialiasing.IsSupported())
                {
                    // SMAA needs its own pass before the final blit
                    tempTarget = m_TargetPool.Get();
                    var finalDestination = context.destination;
                    context.GetScreenSpaceTemporaryRT(context.command, tempTarget, 24, context.sourceFormat);
                    context.destination = tempTarget;
                    subpixelMorphologicalAntialiasing.Render(context);
                    context.source = tempTarget;
                    context.destination = finalDestination;
                }

                dithering.Render(context);

                cmd.BlitFullscreenTriangle(context.source, context.destination, uberSheet, (context.flip && !context.isSceneView) ? 1 : 0);

                if (tempTarget > -1)
                    cmd.ReleaseTemporaryRT(tempTarget);
            }

            if (releaseTargetAfterUse > -1)
                cmd.ReleaseTemporaryRT(releaseTargetAfterUse);

            cmd.EndSample("FinalPass");
        }

        // Renders a single built-in effect if it's active. With useTempTarget, the effect
        // writes into a fresh temporary RT whose id is returned (caller must release it);
        // returns -1 when the effect was skipped or rendered in place.
        int RenderEffect<T>(PostProcessRenderContext context, bool useTempTarget = false)
            where T : PostProcessEffectSettings
        {
            var effect = GetBundle<T>();

            if (!effect.settings.IsEnabledAndSupported(context))
                return -1;

            if (m_IsRenderingInSceneView && !effect.attribute.allowInSceneView)
                return -1;

            if (!useTempTarget)
            {
                effect.renderer.Render(context);
                return -1;
            }

            var finalDestination = context.destination;
            var tempTarget = m_TargetPool.Get();
            context.GetScreenSpaceTemporaryRT(context.command, tempTarget, 24, context.sourceFormat);
            context.destination = tempTarget;
            effect.renderer.Render(context);
            context.source = tempTarget;
            context.destination = finalDestination;
            return tempTarget;
        }

        // The exposure histogram is only needed by auto-exposure or the light meter debug view.
        bool ShouldGenerateLogHistogram(PostProcessRenderContext context)
        {
            bool autoExpo = GetBundle<AutoExposure>().settings.IsEnabledAndSupported(context);
            bool lightMeter = debugLayer.lightMeter.IsRequestedAndSupported();
            return autoExpo || lightMeter;
        }
    }
}
948
orrb
openai
C#
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    // Singleton used to tracks all existing volumes in the scene
    // TODO: Deal with 2D volumes !
    public sealed class PostProcessManager
    {
        static PostProcessManager s_Instance;

        public static PostProcessManager instance
        {
            get
            {
                if (s_Instance == null)
                    s_Instance = new PostProcessManager();

                return s_Instance;
            }
        }

        const int k_MaxLayerCount = 32; // Max amount of layers available in Unity

        // Per-layer-mask caches of volumes, kept sorted by priority on demand
        readonly Dictionary<int, List<PostProcessVolume>> m_SortedVolumes;
        readonly List<PostProcessVolume> m_Volumes;
        readonly Dictionary<int, bool> m_SortNeeded;
        // Default-state instances of every known effect, used to reset layers each frame
        readonly List<PostProcessEffectSettings> m_BaseSettings;
        readonly List<Collider> m_TempColliders;

        public readonly Dictionary<Type, PostProcessAttribute> settingsTypes;

        PostProcessManager()
        {
            m_SortedVolumes = new Dictionary<int, List<PostProcessVolume>>();
            m_Volumes = new List<PostProcessVolume>();
            m_SortNeeded = new Dictionary<int, bool>();
            m_BaseSettings = new List<PostProcessEffectSettings>();
            m_TempColliders = new List<Collider>(5);
            settingsTypes = new Dictionary<Type, PostProcessAttribute>();
            ReloadBaseTypes();
        }

#if UNITY_EDITOR
        // Called every time Unity recompile scripts in the editor. We need this to keep track of
        // any new custom effect the user might add to the project
        [UnityEditor.Callbacks.DidReloadScripts]
        static void OnEditorReload()
        {
            instance.ReloadBaseTypes();
        }
#endif

        void CleanBaseTypes()
        {
            settingsTypes.Clear();

            foreach (var settings in m_BaseSettings)
                RuntimeUtilities.Destroy(settings);

            m_BaseSettings.Clear();
        }

        // This will be called only once at runtime and everytime script reload kicks-in in the
        // editor as we need to keep track of any compatible post-processing effects in the project
        void ReloadBaseTypes()
        {
            CleanBaseTypes();

            // Rebuild the base type map
            var types = RuntimeUtilities.GetAllAssemblyTypes()
                .Where(
                    t => t.IsSubclassOf(typeof(PostProcessEffectSettings))
                      && t.IsDefined(typeof(PostProcessAttribute), false)
                      && !t.IsAbstract
                );

            foreach (var type in types)
            {
                settingsTypes.Add(type, type.GetAttribute<PostProcessAttribute>());

                // Create an instance for each effect type, these will be used for the lowest
                // priority global volume as we need a default state when exiting volume ranges
                var inst = (PostProcessEffectSettings)ScriptableObject.CreateInstance(type);
                inst.SetAllOverridesTo(true, false);
                m_BaseSettings.Add(inst);
            }
        }

        // Gets a list of all volumes currently affecting the given layer. Results aren't sorted.
        // Volume with weight of 0 or no profile set will be skipped. Results list won't be cleared.
        public void GetActiveVolumes(PostProcessLayer layer, List<PostProcessVolume> results, bool skipDisabled = true, bool skipZeroWeight = true)
        {
            // If no trigger is set, only global volumes will have influence
            int mask = layer.volumeLayer.value;
            var volumeTrigger = layer.volumeTrigger;
            bool onlyGlobal = volumeTrigger == null;
            var triggerPos = onlyGlobal ? Vector3.zero : volumeTrigger.position;

            // Sort the cached volume list(s) for the given layer mask if needed and return it
            var volumes = GrabVolumes(mask);

            // Traverse all volumes
            foreach (var volume in volumes)
            {
                // Skip disabled volumes and volumes without any data or weight
                if ((skipDisabled && !volume.enabled) || volume.profileRef == null || (skipZeroWeight && volume.weight <= 0f))
                    continue;

                // Global volume always have influence
                if (volume.isGlobal)
                {
                    results.Add(volume);
                    continue;
                }

                if (onlyGlobal)
                    continue;

                // If volume isn't global and has no collider, skip it as it's useless
                var colliders = m_TempColliders;
                volume.GetComponents(colliders);
                if (colliders.Count == 0)
                    continue;

                // Find closest distance to volume, 0 means it's inside it
                float closestDistanceSqr = float.PositiveInfinity;

                foreach (var collider in colliders)
                {
                    if (!collider.enabled)
                        continue;

                    var closestPoint = collider.ClosestPoint(triggerPos); // 5.6-only API
                    var d = ((closestPoint - triggerPos) / 2f).sqrMagnitude;

                    if (d < closestDistanceSqr)
                        closestDistanceSqr = d;
                }

                colliders.Clear();
                float blendDistSqr = volume.blendDistance * volume.blendDistance;

                // Check for influence
                if (closestDistanceSqr <= blendDistSqr)
                    results.Add(volume);
            }
        }

        public PostProcessVolume GetHighestPriorityVolume(PostProcessLayer layer)
        {
            if (layer == null)
                throw new ArgumentNullException("layer");

            return GetHighestPriorityVolume(layer.volumeLayer);
        }

        // Returns the highest-priority volume registered for the given mask, or null if the
        // mask has never been used by a layer (no cached list exists yet).
        public PostProcessVolume GetHighestPriorityVolume(LayerMask mask)
        {
            float highestPriority = float.NegativeInfinity;
            PostProcessVolume output = null;

            List<PostProcessVolume> volumes;
            if (m_SortedVolumes.TryGetValue(mask, out volumes))
            {
                foreach (var volume in volumes)
                {
                    if (volume.priority > highestPriority)
                    {
                        highestPriority = volume.priority;
                        output = volume;
                    }
                }
            }

            return output;
        }

        // Creates a hidden, global volume on the given layer with the given settings. The
        // returned volume (and its game object) is owned by the caller.
        public PostProcessVolume QuickVolume(int layer, float priority, params PostProcessEffectSettings[] settings)
        {
            var gameObject = new GameObject()
            {
                name = "Quick Volume",
                layer = layer,
                hideFlags = HideFlags.HideAndDontSave
            };

            var volume = gameObject.AddComponent<PostProcessVolume>();
            volume.priority = priority;
            volume.isGlobal = true;
            var profile = volume.profile;

            foreach (var s in settings)
            {
                Assert.IsNotNull(s, "Trying to create a volume with null effects");
                profile.AddSettings(s);
            }

            return volume;
        }

        internal void SetLayerDirty(int layer)
        {
            // FIX: was `layer <= k_MaxLayerCount`, letting layer 32 through; in C#
            // `1 << 32` wraps to `1 << 0` and would flag the wrong mask bit.
            Assert.IsTrue(layer >= 0 && layer < k_MaxLayerCount, "Invalid layer bit");

            foreach (var kvp in m_SortedVolumes)
            {
                var mask = kvp.Key;

                if ((mask & (1 << layer)) != 0)
                    m_SortNeeded[mask] = true;
            }
        }

        internal void UpdateVolumeLayer(PostProcessVolume volume, int prevLayer, int newLayer)
        {
            // FIX: same off-by-one as SetLayerDirty (valid layers are 0..31)
            Assert.IsTrue(prevLayer >= 0 && prevLayer < k_MaxLayerCount, "Invalid layer bit");
            Unregister(volume, prevLayer);
            Register(volume, newLayer);
        }

        void Register(PostProcessVolume volume, int layer)
        {
            m_Volumes.Add(volume);

            // Look for existing cached layer masks and add it there if needed
            foreach (var kvp in m_SortedVolumes)
            {
                var mask = kvp.Key;

                if ((mask & (1 << layer)) != 0)
                    kvp.Value.Add(volume);
            }

            SetLayerDirty(layer);
        }

        internal void Register(PostProcessVolume volume)
        {
            int layer = volume.gameObject.layer;
            Register(volume, layer);
        }

        void Unregister(PostProcessVolume volume, int layer)
        {
            m_Volumes.Remove(volume);

            foreach (var kvp in m_SortedVolumes)
            {
                var mask = kvp.Key;

                // Skip layer masks this volume doesn't belong to
                if ((mask & (1 << layer)) == 0)
                    continue;

                kvp.Value.Remove(volume);
            }
        }

        internal void Unregister(PostProcessVolume volume)
        {
            int layer = volume.gameObject.layer;
            Unregister(volume, layer);
        }

        // Faster version of OverrideSettings to force replace values in the global state
        void ReplaceData(PostProcessLayer postProcessLayer)
        {
            foreach (var settings in m_BaseSettings)
            {
                var target = postProcessLayer.GetBundle(settings.GetType()).settings;
                int count = settings.parameters.Count;

                for (int i = 0; i < count; i++)
                    target.parameters[i].SetValue(settings.parameters[i]);
            }
        }

        // Blends every volume affecting the layer into the layer's effect state, resetting
        // to the default state first.
        internal void UpdateSettings(PostProcessLayer postProcessLayer)
        {
            // Reset to base state
            ReplaceData(postProcessLayer);

            // If no trigger is set, only global volumes will have influence
            int mask = postProcessLayer.volumeLayer.value;
            var volumeTrigger = postProcessLayer.volumeTrigger;
            bool onlyGlobal = volumeTrigger == null;
            var triggerPos = onlyGlobal ? Vector3.zero : volumeTrigger.position;

            // Sort the cached volume list(s) for the given layer mask if needed and return it
            var volumes = GrabVolumes(mask);

            // Traverse all volumes
            foreach (var volume in volumes)
            {
                // Skip disabled volumes and volumes without any data or weight
                if (!volume.enabled || volume.profileRef == null || volume.weight <= 0f)
                    continue;

                var settings = volume.profileRef.settings;

                // Global volume always have influence
                if (volume.isGlobal)
                {
                    postProcessLayer.OverrideSettings(settings, Mathf.Clamp01(volume.weight));
                    continue;
                }

                if (onlyGlobal)
                    continue;

                // If volume isn't global and has no collider, skip it as it's useless
                var colliders = m_TempColliders;
                volume.GetComponents(colliders);
                if (colliders.Count == 0)
                    continue;

                // Find closest distance to volume, 0 means it's inside it
                float closestDistanceSqr = float.PositiveInfinity;

                foreach (var collider in colliders)
                {
                    if (!collider.enabled)
                        continue;

                    var closestPoint = collider.ClosestPoint(triggerPos); // 5.6-only API
                    var d = ((closestPoint - triggerPos) / 2f).sqrMagnitude;

                    if (d < closestDistanceSqr)
                        closestDistanceSqr = d;
                }

                colliders.Clear();
                float blendDistSqr = volume.blendDistance * volume.blendDistance;

                // Volume has no influence, ignore it
                // Note: Volume doesn't do anything when `closestDistanceSqr = blendDistSqr` but
                //       we can't use a >= comparison as blendDistSqr could be set to 0 in which
                //       case volume would have total influence
                if (closestDistanceSqr > blendDistSqr)
                    continue;

                // Volume has influence
                float interpFactor = 1f;

                if (blendDistSqr > 0f)
                    interpFactor = 1f - (closestDistanceSqr / blendDistSqr);

                // No need to clamp01 the interpolation factor as it'll always be in [0;1[ range
                postProcessLayer.OverrideSettings(settings, interpFactor * Mathf.Clamp01(volume.weight));
            }
        }

        List<PostProcessVolume> GrabVolumes(LayerMask mask)
        {
            List<PostProcessVolume> list;

            if (!m_SortedVolumes.TryGetValue(mask, out list))
            {
                // New layer mask detected, create a new list and cache all the volumes that belong
                // to this mask in it
                list = new List<PostProcessVolume>();

                foreach (var volume in m_Volumes)
                {
                    if ((mask & (1 << volume.gameObject.layer)) == 0)
                        continue;

                    list.Add(volume);
                    m_SortNeeded[mask] = true;
                }

                m_SortedVolumes.Add(mask, list);
            }

            // Check sorting state
            bool sortNeeded;
            if (m_SortNeeded.TryGetValue(mask, out sortNeeded) && sortNeeded)
            {
                m_SortNeeded[mask] = false;
                SortByPriority(list);
            }

            return list;
        }

        // Custom insertion sort. First sort will be slower but after that it'll be faster than
        // using List<T>.Sort() which is also unstable by nature.
        // Sort order is ascending.
        static void SortByPriority(List<PostProcessVolume> volumes)
        {
            Assert.IsNotNull(volumes, "Trying to sort volumes of non-initialized layer");

            for (int i = 1; i < volumes.Count; i++)
            {
                var temp = volumes[i];
                int j = i - 1;

                while (j >= 0 && volumes[j].priority > temp.priority)
                {
                    volumes[j + 1] = volumes[j];
                    j--;
                }

                volumes[j + 1] = temp;
            }
        }
    }
}
412
orrb
openai
C#
using System;
using System.Collections.Generic;

namespace UnityEngine.Rendering.PostProcessing
{
    // Disk asset holding the list of effect settings & overrides applied by a volume.
    public sealed class PostProcessProfile : ScriptableObject
    {
        [Tooltip("A list of all settings & overrides.")]
        public List<PostProcessEffectSettings> settings = new List<PostProcessEffectSettings>();

        // Editor only, doesn't have any use outside of it
        [NonSerialized]
        public bool isDirty = true;

        void OnEnable()
        {
            // Make sure every setting is valid. If a profile holds a script that doesn't exist
            // anymore, nuke it to keep the profile clean. Note that if you delete a script that is
            // currently in use in a profile you'll still get a one-time error in the console, it's
            // harmless and happens because Unity does a redraw of the editor (and thus the current
            // frame) before the recompilation step.
            settings.RemoveAll(x => x == null);
        }

        public void Reset()
        {
            isDirty = true;
        }

        // Adds a new effect of type T to the stack; throws if one already exists.
        public T AddSettings<T>() where T : PostProcessEffectSettings
        {
            return (T)AddSettings(typeof(T));
        }

        // Creates & adds a new effect instance of the given type; throws if one already exists.
        public PostProcessEffectSettings AddSettings(Type type)
        {
            if (HasSettings(type))
                throw new InvalidOperationException("Effect already exists in the stack");

            var effect = (PostProcessEffectSettings)CreateInstance(type);
            effect.hideFlags = HideFlags.HideInInspector | HideFlags.HideInHierarchy;
            effect.name = type.Name;
            effect.enabled.value = true;
            settings.Add(effect);
            isDirty = true;
            return effect;
        }

        // Adds an existing effect instance to the stack; throws if one of the same type exists.
        public PostProcessEffectSettings AddSettings(PostProcessEffectSettings effect)
        {
            // FIX: was `HasSettings(settings.GetType())`, which tested the List<> type
            // itself and never matched, so duplicate effects were silently allowed in.
            if (HasSettings(effect.GetType()))
                throw new InvalidOperationException("Effect already exists in the stack");

            settings.Add(effect);
            isDirty = true;
            return effect;
        }

        public void RemoveSettings<T>() where T : PostProcessEffectSettings
        {
            RemoveSettings(typeof(T));
        }

        // Removes the effect of the given type; throws if it isn't in the stack.
        public void RemoveSettings(Type type)
        {
            int toRemove = -1;

            for (int i = 0; i < settings.Count; i++)
            {
                if (settings[i].GetType() == type)
                {
                    toRemove = i;
                    break;
                }
            }

            if (toRemove < 0)
                throw new InvalidOperationException("Effect doesn't exist in the stack");

            settings.RemoveAt(toRemove);
            isDirty = true;
        }

        public bool HasSettings<T>() where T : PostProcessEffectSettings
        {
            return HasSettings(typeof(T));
        }

        // True if an effect of the exact given type is present in the stack.
        public bool HasSettings(Type type)
        {
            foreach (var setting in settings)
            {
                if (setting.GetType() == type)
                    return true;
            }

            return false;
        }

        // Try-pattern lookup; outSetting is null when the effect type isn't present.
        public bool TryGetSettings<T>(out T outSetting) where T : PostProcessEffectSettings
        {
            var type = typeof(T);
            outSetting = null;

            foreach (var setting in settings)
            {
                if (setting.GetType() == type)
                {
                    outSetting = (T)setting;
                    return true;
                }
            }

            return false;
        }
    }
}
122
orrb
openai
C#
using System.Collections.Generic;

namespace UnityEngine.Rendering.PostProcessing
{
#if UNITY_2017_2_OR_NEWER
    using XRSettings = UnityEngine.XR.XRSettings;
#elif UNITY_5_6_OR_NEWER
    using XRSettings = UnityEngine.VR.VRSettings;
#endif

    // Context object passed around all post-fx in a frame
    public sealed class PostProcessRenderContext
    {
        // -----------------------------------------------------------------------------------------
        // The following should be filled by the render pipeline

        // Camera currently rendering.
        // Setting the camera also derives width/height/screenWidth/screenHeight, the XR
        // eye state and (2017.2+) the source RenderTextureDescriptor.
        Camera m_Camera;
        public Camera camera
        {
            get { return m_Camera; }
            set
            {
                m_Camera = value;

                if (m_Camera.stereoEnabled)
                {
#if UNITY_2017_2_OR_NEWER
                    // Use the XR eye texture description so temporary RTs match the
                    // per-eye target (dimension, vrUsage, MSAA...)
                    var xrDesc = XRSettings.eyeTextureDesc;
                    width = xrDesc.width;
                    height = xrDesc.height;
                    m_sourceDescriptor = xrDesc;
#else
                    // Single-pass is only supported with 2017.2+ because
                    // that is when XRSettings.eyeTextureDesc is available.
                    // Without it, we don't have a robust method of determining
                    // if we are in single-pass. Users can just double the width
                    // here if they KNOW they are using single-pass.
                    width = XRSettings.eyeTextureWidth;
                    height = XRSettings.eyeTextureHeight;
#endif

                    if (m_Camera.stereoActiveEye == Camera.MonoOrStereoscopicEye.Right)
                        xrActiveEye = (int)Camera.StereoscopicEye.Right;

                    screenWidth = XRSettings.eyeTextureWidth;
                    screenHeight = XRSettings.eyeTextureHeight;
                    stereoActive = true;
                }
                else
                {
                    width = m_Camera.pixelWidth;
                    height = m_Camera.pixelHeight;
#if UNITY_2017_2_OR_NEWER
                    m_sourceDescriptor.width = width;
                    m_sourceDescriptor.height = height;
#endif
                    screenWidth = width;
                    screenHeight = height;
                    stereoActive = false;
                }
            }
        }

        // The command buffer to fill in
        public CommandBuffer command { get; set; }

        // Source target (can't be the same as destination)
        public RenderTargetIdentifier source { get; set; }

        // Destination target (can't be the same as source)
        public RenderTargetIdentifier destination { get; set; }

        // Texture format used for the source target
        // We need this to be set explictely as we don't have any way of knowing if we're rendering
        // using HDR or not as scriptable render pipelines may ignore the HDR toggle on camera
        // completely
        public RenderTextureFormat sourceFormat { get; set; }

        // Should we flip the last pass?
        public bool flip { get; set; }

        // -----------------------------------------------------------------------------------------
        // The following is auto-populated by the post-processing stack

        // Contains references to external resources (shaders, builtin textures...)
        public PostProcessResources resources { get; internal set; }

        // Property sheet factory handled by the currently active PostProcessLayer
        public PropertySheetFactory propertySheets { get; internal set; }

        // Custom user data objects (unused by builtin effects, feel free to store whatever you want
        // in this dictionary)
        public Dictionary<string, object> userData { get; private set; }

        // Reference to the internal debug layer
        public PostProcessDebugLayer debugLayer { get; internal set; }

        // Current camera width in pixels
        public int width { get; private set; }

        // Current camera height in pixels
        public int height { get; private set; }

        // TODO: Change w/h name to texture w/h in order to make
        // size usages explicit

#if UNITY_2017_2_OR_NEWER
        // Descriptor of the current source target; used as the template for every
        // temporary RT grabbed through this context so XR/MSAA settings carry over.
        private RenderTextureDescriptor m_sourceDescriptor;

        // Returns a copy of the source descriptor with the requested depth/format/sRGB
        // overrides applied; all other fields are inherited from the source target.
        private RenderTextureDescriptor GetDescriptor(int depthBufferBits = 0, RenderTextureFormat colorFormat = RenderTextureFormat.Default, RenderTextureReadWrite readWrite = RenderTextureReadWrite.Default)
        {
            var modifiedDesc = new RenderTextureDescriptor(m_sourceDescriptor.width, m_sourceDescriptor.height,
                                                                m_sourceDescriptor.colorFormat, depthBufferBits);
            modifiedDesc.dimension = m_sourceDescriptor.dimension;
            modifiedDesc.volumeDepth = m_sourceDescriptor.volumeDepth;
            modifiedDesc.vrUsage = m_sourceDescriptor.vrUsage;
            modifiedDesc.msaaSamples = m_sourceDescriptor.msaaSamples;
            modifiedDesc.memoryless = m_sourceDescriptor.memoryless;

            modifiedDesc.useMipMap = m_sourceDescriptor.useMipMap;
            modifiedDesc.autoGenerateMips = m_sourceDescriptor.autoGenerateMips;
            modifiedDesc.enableRandomWrite = m_sourceDescriptor.enableRandomWrite;
            modifiedDesc.shadowSamplingMode = m_sourceDescriptor.shadowSamplingMode;

            if (colorFormat != RenderTextureFormat.Default)
                modifiedDesc.colorFormat = colorFormat;
            if (readWrite != RenderTextureReadWrite.Default)
                modifiedDesc.sRGB = (readWrite != RenderTextureReadWrite.Linear);

            return modifiedDesc;
        }
#endif

        // Grabs a temporary RT on the command buffer, sized like the source target unless
        // widthOverride/heightOverride are given. Caller must release it with
        // CommandBuffer.ReleaseTemporaryRT when done.
        public void GetScreenSpaceTemporaryRT(CommandBuffer cmd, int nameID,
            int depthBufferBits = 0, RenderTextureFormat colorFormat = RenderTextureFormat.Default,
            RenderTextureReadWrite readWrite = RenderTextureReadWrite.Default, FilterMode filter = FilterMode.Bilinear,
            int widthOverride = 0, int heightOverride = 0)
        {
#if UNITY_2017_2_OR_NEWER
            var desc = GetDescriptor(depthBufferBits, colorFormat, readWrite);
            if (widthOverride > 0)
                desc.width = widthOverride;
            if (heightOverride > 0)
                desc.height = heightOverride;
            cmd.GetTemporaryRT(nameID, desc, filter);
#else
            int actualWidth = width;
            int actualHeight = height;
            if (widthOverride > 0)
                actualWidth = widthOverride;
            if (heightOverride > 0)
                actualHeight = heightOverride;

            cmd.GetTemporaryRT(nameID, actualWidth, actualHeight, depthBufferBits, filter, colorFormat, readWrite);
            // TODO: How to handle MSAA for XR in older versions? Query cam?
            // TODO: Pass in vrUsage into the args
#endif
        }

        // Same as above but allocates directly from the RenderTexture pool instead of
        // going through a command buffer. Release with RenderTexture.ReleaseTemporary.
        public RenderTexture GetScreenSpaceTemporaryRT(int depthBufferBits = 0, RenderTextureFormat colorFormat = RenderTextureFormat.Default,
            RenderTextureReadWrite readWrite = RenderTextureReadWrite.Default,
            int widthOverride = 0, int heightOverride = 0)
        {
#if UNITY_2017_2_OR_NEWER
            var desc = GetDescriptor(depthBufferBits, colorFormat, readWrite);
            if (widthOverride > 0)
                desc.width = widthOverride;
            if (heightOverride > 0)
                desc.height = heightOverride;

            return RenderTexture.GetTemporary(desc);
#else
            int actualWidth = width;
            int actualHeight = height;
            if (widthOverride > 0)
                actualWidth = widthOverride;
            if (heightOverride > 0)
                actualHeight = heightOverride;

            return RenderTexture.GetTemporary(actualWidth, actualHeight, depthBufferBits, colorFormat, readWrite);
#endif
        }

        public bool stereoActive { get; private set; }

        // Current active rendering eye (for XR)
        public int xrActiveEye { get; private set; }

        // Pixel dimensions of logical screen size
        public int screenWidth { get; private set; }
        public int screenHeight { get; private set; }

        // Are we currently rendering in the scene view?
        public bool isSceneView { get; internal set; }

        // Current antialiasing method set
        public PostProcessLayer.Antialiasing antialiasing { get; internal set; }

        // Mostly used to grab the jitter vector and other TAA-related values when an effect needs
        // to do temporal reprojection (see: Depth of Field)
        public TemporalAntialiasing temporalAntialiasing { get; internal set; }

        // Resets every field to its default so the context can be reused for the next
        // frame/camera; userData is cleared but the dictionary instance is kept.
        public void Reset()
        {
            m_Camera = null;
            width = 0;
            height = 0;

#if UNITY_2017_2_OR_NEWER
            m_sourceDescriptor = new RenderTextureDescriptor(0, 0);
#endif
            stereoActive = false;
            xrActiveEye = (int)Camera.StereoscopicEye.Left;
            screenWidth = 0;
            screenHeight = 0;

            command = null;
            source = 0;
            destination = 0;
            sourceFormat = RenderTextureFormat.ARGB32;
            flip = false;

            resources = null;
            propertySheets = null;
            debugLayer = null;
            isSceneView = false;
            antialiasing = PostProcessLayer.Antialiasing.None;
            temporalAntialiasing = null;

            uberSheet = null;
            autoExposureTexture = null;
            logLut = null;
            autoExposure = null;
            bloomBufferNameID = -1;

            if (userData == null)
                userData = new Dictionary<string, object>();

            userData.Clear();
        }

        // Checks if TAA is enabled & supported
        public bool IsTemporalAntialiasingActive()
        {
            return antialiasing == PostProcessLayer.Antialiasing.TemporalAntialiasing
                && !isSceneView
                && temporalAntialiasing.IsSupported();
        }

        // Checks if a specific debug overlay is enabled
        public bool IsDebugOverlayEnabled(DebugOverlay overlay)
        {
            return debugLayer.debugOverlay == overlay;
        }

        // Shortcut function
        public void PushDebugOverlay(CommandBuffer cmd, RenderTargetIdentifier source, PropertySheet sheet, int pass)
        {
            debugLayer.PushDebugOverlay(cmd, source, sheet, pass);
        }

        // Internal values used for builtin effects
        // Beware, these may not have been set before a specific builtin effect has been executed
        internal PropertySheet uberSheet;
        internal Texture autoExposureTexture;
        internal LogHistogram logHistogram;
        internal Texture logLut;
        internal AutoExposure autoExposure;
        internal int bloomBufferNameID;
    }
}
275
orrb
openai
C#
using System; namespace UnityEngine.Rendering.PostProcessing { // This asset is used to store references to shaders and other resources we might need at // runtime without having to use a `Resources` folder. This allows for better memory management, // better dependency tracking and better interoperability with asset bundles. public sealed class PostProcessResources : ScriptableObject { [Serializable] public sealed class Shaders { public Shader autoExposure; public Shader bloom; public Shader copy; public Shader copyStd; public Shader discardAlpha; public Shader depthOfField; public Shader finalPass; public Shader grainBaker; public Shader motionBlur; public Shader temporalAntialiasing; public Shader subpixelMorphologicalAntialiasing; public Shader texture2dLerp; public Shader uber; public Shader lut2DBaker; public Shader lightMeter; public Shader gammaHistogram; public Shader waveform; public Shader vectorscope; public Shader debugOverlays; public Shader deferredFog; public Shader scalableAO; public Shader multiScaleAO; public Shader screenSpaceReflections; } [Serializable] public sealed class ComputeShaders { public ComputeShader exposureHistogram; public ComputeShader lut3DBaker; public ComputeShader texture3dLerp; public ComputeShader gammaHistogram; public ComputeShader waveform; public ComputeShader vectorscope; public ComputeShader multiScaleAODownsample1; public ComputeShader multiScaleAODownsample2; public ComputeShader multiScaleAORender; public ComputeShader multiScaleAOUpsample; public ComputeShader gaussianDownsample; } [Serializable] public sealed class SMAALuts { public Texture2D area; public Texture2D search; } public Texture2D[] blueNoise64; public Texture2D[] blueNoise256; public SMAALuts smaaLuts; public Shaders shaders; public ComputeShaders computeShaders; } }
68
orrb
openai
C#
using System.Collections.Generic;

namespace UnityEngine.Rendering.PostProcessing
{
    //
    // Here's a quick look at the architecture of this framework and how it's integrated into Unity
    // (written between versions 5.6 and 2017.1):
    //
    // Users have to be able to plug in their own effects without having to modify the codebase and
    // these custom effects should work out-of-the-box with all the other features we provide
    // (volume blending etc). This relies on heavy use of polymorphism, but the only way to get
    // the serialization system to work well with polymorphism in Unity is to use ScriptableObjects.
    //
    // Users can push their custom effects at different (hardcoded) injection points.
    //
    // Each effect consists of at least two classes (+ shaders): a POD "Settings" class which only
    // stores parameters, and a "Renderer" class that holds the rendering logic. Settings are linked
    // to renderers using a PostProcessAttribute. These are automatically collected at init time
    // using reflection. Settings in this case are ScriptableObjects, we only need to serialize
    // these.
    //
    // We could store these settings object straight into each volume and call it a day, but
    // unfortunately there's one feature of Unity that doesn't work well with scene-stored assets:
    // prefabs. So we need to store all of these settings in a disk-asset and treat them as
    // sub-assets.
    //
    // Note: We have to use ScriptableObject for everything but these don't work with the Animator
    //       tool. It's unfortunate but it's the only way to make it easily extensible. On the other
    //       hand, users can animate post-processing effects using Volumes or straight up scripting.
    //
    // Volume blending leverages the physics system for distance checks to the nearest point on
    // volume colliders. Each volume can have several colliders or any type (cube, mesh...), making
    // it quite a powerful feature to use.
    //
    // Volumes & blending are handled by a singleton manager (see PostProcessManager).
    //
    // Rendering is handled by a PostProcessLayer component living on the camera, which mean you
    // can easily toggle post-processing on & off or change the anti-aliasing type per-camera,
    // which is very useful when doing multi-layered camera rendering or any other technique that
    // involves multiple-camera setups. This PostProcessLayer component can also filters volumes
    // by layers (as in Unity layers) so you can easily choose which volumes should affect the
    // camera.
    //
    // All post-processing shaders MUST use the custom Standard Shader Library bundled with the
    // framework. The reason for that is because the codebase is meant to work without any
    // modification on the Classic Render Pipelines (Forward, Deferred...) and the upcoming
    // Scriptable Render Pipelines (HDPipe, LDPipe...). But these don't have compatible shader
    // libraries so instead of writing two code paths we chose to provide a minimalist, generic
    // Standard Library geared toward post-processing use. An added bonus to that if users create
    // their own post-processing effects using this framework, then they'll work without any
    // modification on both Classic and Scriptable Render Pipelines.
    //
    [ExecuteInEditMode]
    [AddComponentMenu("Rendering/Post-process Volume", 1001)]
    public sealed class PostProcessVolume : MonoBehaviour
    {
        // Modifying sharedProfile will change the behavior of all volumes using this profile, and
        // change profile settings that are stored in the project too
        public PostProcessProfile sharedProfile;

        [Tooltip("A global volume is applied to the whole scene.")]
        public bool isGlobal = false;

        [Min(0f), Tooltip("Outer distance to start blending from. A value of 0 means no blending and the volume overrides will be applied immediatly upon entry.")]
        public float blendDistance = 0f;

        [Range(0f, 1f), Tooltip("Total weight of this volume in the scene. 0 means it won't do anything, 1 means full effect.")]
        public float weight = 1f;

        [Tooltip("Volume priority in the stack. Higher number means higher priority. Negative values are supported.")]
        public float priority = 0f;

        // This property automatically instantiates the profile and make it unique to this volume
        // so you can safely edit it via scripting at runtime without changing the original asset
        // in the project.
        // Note that if you pass in your own profile, it is your responsability to destroy it once
        // it's not in use anymore.
        public PostProcessProfile profile
        {
            get
            {
                // Lazily clone the shared profile's settings into a private instance
                if (m_InternalProfile == null)
                {
                    m_InternalProfile = ScriptableObject.CreateInstance<PostProcessProfile>();

                    if (sharedProfile != null)
                    {
                        foreach (var item in sharedProfile.settings)
                        {
                            var itemCopy = Instantiate(item);
                            m_InternalProfile.settings.Add(itemCopy);
                        }
                    }
                }

                return m_InternalProfile;
            }
            set
            {
                m_InternalProfile = value;
            }
        }

        // The profile actually used for blending: the private instance when one exists,
        // otherwise the shared asset
        internal PostProcessProfile profileRef
        {
            get
            {
                return m_InternalProfile == null
                    ? sharedProfile
                    : m_InternalProfile;
            }
        }

        // Cached copies used to detect layer/priority changes in Update() (no change event exists)
        int m_PreviousLayer;
        float m_PreviousPriority;
        List<Collider> m_TempColliders;
        PostProcessProfile m_InternalProfile;

        void OnEnable()
        {
            PostProcessManager.instance.Register(this);
            m_PreviousLayer = gameObject.layer;
            m_TempColliders = new List<Collider>();
        }

        void OnDisable()
        {
            PostProcessManager.instance.Unregister(this);
        }

        void Update()
        {
            // Unfortunately we need to track the current layer to update the volume manager in
            // real-time as the user could change it at any time in the editor or at runtime.
            // Because no event is raised when the layer changes, we have to track it on every
            // frame :/
            int layer = gameObject.layer;

            if (layer != m_PreviousLayer)
            {
                PostProcessManager.instance.UpdateVolumeLayer(this, m_PreviousLayer, layer);
                m_PreviousLayer = layer;
            }

            // Same for `priority`. We could use a property instead, but it doesn't play nice with
            // the serialization system. Using a custom Attribute/PropertyDrawer for a property is
            // possible but it doesn't work with Undo/Redo in the editor, which makes it useless.
            if (priority != m_PreviousPriority)
            {
                PostProcessManager.instance.SetLayerDirty(layer);
                m_PreviousPriority = priority;
            }
        }

        // TODO: Look into a better volume previsualization system
        void OnDrawGizmos()
        {
            var colliders = m_TempColliders;
            GetComponents(colliders);

            if (isGlobal || colliders == null)
                return;

#if UNITY_EDITOR
            // Can't access the UnityEditor.Rendering.PostProcessing namespace from here, so
            // we'll get the preferred color manually
            unchecked
            {
                int value = UnityEditor.EditorPrefs.GetInt("PostProcessing.Volume.GizmoColor", (int)0x8033cc1a);
                Gizmos.color = ColorUtilities.ToRGBA((uint)value);
            }
#endif

            var scale = transform.localScale;
            var invScale = new Vector3(1f / scale.x, 1f / scale.y, 1f / scale.z);
            Gizmos.matrix = Matrix4x4.TRS(transform.position, transform.rotation, scale);

            // Draw a separate gizmo for each collider
            foreach (var collider in colliders)
            {
                if (!collider.enabled)
                    continue;

                // We'll just use scaling as an approximation for volume skin. It's far from being
                // correct (and is completely wrong in some cases). Ultimately we'd use a distance
                // field or at least a tesselate + push modifier on the collider's mesh to get a
                // better approximation, but the current Gizmo system is a bit limited and because
                // everything is dynamic in Unity and can be changed at anytime, it's hard to keep
                // track of changes in an elegant way (which we'd need to implement a nice cache
                // system for generated volume meshes).
                var type = collider.GetType();

                if (type == typeof(BoxCollider))
                {
                    var c = (BoxCollider)collider;
                    Gizmos.DrawCube(c.center, c.size);
                    Gizmos.DrawWireCube(c.center, c.size + invScale * blendDistance * 4f);
                }
                else if (type == typeof(SphereCollider))
                {
                    var c = (SphereCollider)collider;
                    Gizmos.DrawSphere(c.center, c.radius);
                    Gizmos.DrawWireSphere(c.center, c.radius + invScale.x * blendDistance * 2f);
                }
                else if (type == typeof(MeshCollider))
                {
                    var c = (MeshCollider)collider;

                    // Only convex mesh colliders are allowed
                    // NOTE(review): this mutates the collider from a gizmo callback — confirm intended
                    if (!c.convex)
                        c.convex = true;

                    // Mesh pivot should be centered or this won't work
                    Gizmos.DrawMesh(c.sharedMesh);
                    Gizmos.DrawWireMesh(c.sharedMesh, Vector3.zero, Quaternion.identity, Vector3.one + invScale * blendDistance * 4f);
                }

                // Nothing for capsule (DrawCapsule isn't exposed in Gizmo), terrain, wheel and
                // other colliders...
            }

            colliders.Clear();
        }
    }
}
226
orrb
openai
C#
using System; namespace UnityEngine.Rendering.PostProcessing { [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)] public sealed class DisplayNameAttribute : Attribute { public readonly string displayName; public DisplayNameAttribute(string displayName) { this.displayName = displayName; } } }
16
orrb
openai
C#
using System; namespace UnityEngine.Rendering.PostProcessing { [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)] public sealed class MaxAttribute : Attribute { public readonly float max; public MaxAttribute(float max) { this.max = max; } } }
16
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Declares a lower bound for the decorated float field; the stored value is
    /// read by whatever UI/validation code inspects this attribute.
    /// </summary>
    [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)]
    public sealed class MinAttribute : Attribute
    {
        /// <summary>The minimum allowed value for the field.</summary>
        public readonly float min;

        /// <summary>Creates the attribute with the given lower bound.</summary>
        /// <param name="min">Minimum allowed value.</param>
        public MinAttribute(float min)
            => this.min = min;
    }
}
16
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Declares an inclusive [min, max] range for the decorated field; both bounds
    /// are stored for consumers that inspect this attribute.
    /// </summary>
    [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)]
    public sealed class MinMaxAttribute : Attribute
    {
        /// <summary>Lower bound of the range.</summary>
        public readonly float min;

        /// <summary>Upper bound of the range.</summary>
        public readonly float max;

        /// <summary>Creates the attribute with the given range bounds.</summary>
        /// <param name="min">Lower bound of the range.</param>
        /// <param name="max">Upper bound of the range.</param>
        public MinMaxAttribute(float min, float max)
        {
            // Plain field captures; the two assignments are independent.
            this.max = max;
            this.min = min;
        }
    }
}
18
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Marks a settings class as a post-processing effect and binds it to its renderer
    /// type, pipeline injection point, and editor menu path.
    /// </summary>
    [AttributeUsage(AttributeTargets.Class, AllowMultiple = false)]
    public sealed class PostProcessAttribute : Attribute
    {
        /// <summary>The renderer <see cref="Type"/> responsible for drawing this effect.</summary>
        public readonly Type renderer;

        // Injection point in the post-processing pipeline. Note: the internal
        // constructor below never assigns this, so built-in effects keep the
        // enum's default value.
        public readonly PostProcessEvent eventType;

        /// <summary>Menu path used to list the effect (e.g. "Unity/Bloom").</summary>
        public readonly string menuItem;

        /// <summary>When <c>true</c>, the effect is allowed to render in the scene view.</summary>
        public readonly bool allowInSceneView;

        // True only for effects created through the internal constructor,
        // i.e. effects shipped with the stack itself.
        internal readonly bool builtinEffect;

        /// <summary>
        /// Public constructor for user-authored (non-builtin) effects.
        /// </summary>
        /// <param name="renderer">Renderer type for the effect.</param>
        /// <param name="eventType">Injection point in the pipeline.</param>
        /// <param name="menuItem">Menu path for the effect.</param>
        /// <param name="allowInSceneView">Whether the effect may render in the scene view.</param>
        public PostProcessAttribute(Type renderer, PostProcessEvent eventType, string menuItem, bool allowInSceneView = true)
        {
            this.renderer = renderer;
            this.eventType = eventType;
            this.menuItem = menuItem;
            this.allowInSceneView = allowInSceneView;
            builtinEffect = false;
        }

        // Internal constructor reserved for built-in effects; leaves eventType at
        // its default and flags the effect as builtin.
        internal PostProcessAttribute(Type renderer, string menuItem, bool allowInSceneView = true)
        {
            this.renderer = renderer;
            this.menuItem = menuItem;
            this.allowInSceneView = allowInSceneView;
            builtinEffect = true;
        }
    }
}
32
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Tags a field so it can be drawn as a color-grading trackball of the given
    /// <see cref="Mode"/> (see the Lift/Gamma/Gain fields on ColorGrading).
    /// </summary>
    [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)]
    public sealed class TrackballAttribute : Attribute
    {
        /// <summary>Which trackball the decorated field represents.</summary>
        public enum Mode
        {
            /// <summary>No specific trackball.</summary>
            None,
            /// <summary>Lift trackball (dark tones).</summary>
            Lift,
            /// <summary>Gamma trackball (mid tones).</summary>
            Gamma,
            /// <summary>Gain trackball (light tones).</summary>
            Gain
        }

        /// <summary>The trackball mode assigned to the field.</summary>
        public readonly Mode mode;

        /// <summary>Creates the attribute with the given trackball mode.</summary>
        /// <param name="mode">Trackball mode for the field.</param>
        public TrackballAttribute(Mode mode)
            => this.mode = mode;
    }
}
24
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// The ambient occlusion algorithm to run. Order matters: the enum values are
    /// used as indices into AmbientOcclusionRenderer's method array.
    /// </summary>
    public enum AmbientOcclusionMode
    {
        // "SAO" — fragment-shader based, works without compute support.
        ScalableAmbientObscurance,
        // "MSVO" — compute-shader based (see IsEnabledAndSupported below).
        MultiScaleVolumetricObscurance
    }

    /// <summary>Quality presets for the SAO path (sample count / downsampling).</summary>
    public enum AmbientOcclusionQuality
    {
        Lowest,
        Low,
        Medium,
        High,
        Ultra
    }

    /// <summary>Volume-blendable override wrapper for <see cref="AmbientOcclusionMode"/>.</summary>
    [Serializable]
    public sealed class AmbientOcclusionModeParameter : ParameterOverride<AmbientOcclusionMode> {}

    /// <summary>Volume-blendable override wrapper for <see cref="AmbientOcclusionQuality"/>.</summary>
    [Serializable]
    public sealed class AmbientOcclusionQualityParameter : ParameterOverride<AmbientOcclusionQuality> {}

    /// <summary>
    /// Settings for the Ambient Occlusion effect. Parameters are grouped by the
    /// algorithm that consumes them (shared / MSVO-only / SAO-only).
    /// </summary>
    [Serializable]
    [PostProcess(typeof(AmbientOcclusionRenderer), "Unity/Ambient Occlusion")]
    public sealed class AmbientOcclusion : PostProcessEffectSettings
    {
        // Shared parameters
        [Tooltip("The ambient occlusion method to use. \"MSVO\" is higher quality and faster on desktop & console platforms but requires compute shader support.")]
        public AmbientOcclusionModeParameter mode = new AmbientOcclusionModeParameter { value = AmbientOcclusionMode.MultiScaleVolumetricObscurance };

        // Effect is considered disabled while this stays at 0 (see IsEnabledAndSupported).
        [Range(0f, 4f), Tooltip("Degree of darkness added by ambient occlusion.")]
        public FloatParameter intensity = new FloatParameter { value = 0f };

        [ColorUsage(false), Tooltip("Custom color to use for the ambient occlusion.")]
        public ColorParameter color = new ColorParameter { value = Color.black };

        [Tooltip("Only affects ambient lighting. This mode is only available with the Deferred rendering path and HDR rendering. Objects rendered with the Forward rendering path won't get any ambient occlusion.")]
        public BoolParameter ambientOnly = new BoolParameter { value = true };

        // MSVO-only parameters — the three "Hidden" tolerances are not exposed in the UI.
        [Range(-8f, 0f)]
        public FloatParameter noiseFilterTolerance = new FloatParameter { value = 0f }; // Hidden

        [Range(-8f, -1f)]
        public FloatParameter blurTolerance = new FloatParameter { value = -4.6f }; // Hidden

        [Range(-12f, -1f)]
        public FloatParameter upsampleTolerance = new FloatParameter { value = -12f }; // Hidden

        [Range(1f, 10f), Tooltip("Modifies thickness of occluders. This increases dark areas but also introduces dark halo around objects.")]
        public FloatParameter thicknessModifier = new FloatParameter { value = 1f };

        // HDRP-only parameters
        [Range(0f, 1f), Tooltip("")]
        public FloatParameter directLightingStrength = new FloatParameter { value = 0f };

        // SAO-only parameters
        [Tooltip("Radius of sample points, which affects extent of darkened areas.")]
        public FloatParameter radius = new FloatParameter { value = 0.25f };

        [Tooltip("Number of sample points, which affects quality and performance. Lowest, Low & Medium passes are downsampled. High and Ultra are not and should only be used on high-end hardware.")]
        public AmbientOcclusionQualityParameter quality = new AmbientOcclusionQualityParameter { value = AmbientOcclusionQuality.Medium };

        /// <summary>
        /// Enabled only when intensity is above zero AND the selected mode's platform
        /// requirements are met: SAO is excluded under scriptable render pipelines;
        /// MSVO needs compute shaders plus RFloat/RHalf/R8 render-texture support
        /// (and Unity 2017.1+ — otherwise it is force-disabled by the #else branch).
        /// </summary>
        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            bool state = enabled.value
                && intensity.value > 0f;

            if (mode.value == AmbientOcclusionMode.ScalableAmbientObscurance)
            {
                state &= !RuntimeUtilities.scriptableRenderPipelineActive;
            }
            else if (mode.value == AmbientOcclusionMode.MultiScaleVolumetricObscurance)
            {
#if UNITY_2017_1_OR_NEWER
                state &= SystemInfo.supportsComputeShaders
                    && SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RFloat)
                    && SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RHalf)
                    && SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.R8);
#else
                state = false;
#endif
            }

            return state;
        }
    }

    /// <summary>
    /// Common contract implemented by both AO algorithms (ScalableAO, MultiScaleVO)
    /// so the renderer can drive either one interchangeably.
    /// </summary>
    public interface IAmbientOcclusionMethod
    {
        DepthTextureMode GetCameraFlags();
        void RenderAfterOpaque(PostProcessRenderContext context);
        void RenderAmbientOnly(PostProcessRenderContext context);
        void CompositeAmbientOnly(PostProcessRenderContext context);
        void Release();
    }

    /// <summary>
    /// Dispatcher for the AO effect: owns one instance of each algorithm and exposes
    /// the active one via <see cref="Get"/>. Note that <see cref="Render"/> is a
    /// no-op — the actual passes are issued through the IAmbientOcclusionMethod
    /// (NOTE(review): presumably by the pipeline injection code outside this file;
    /// confirm against the callers of Get()).
    /// </summary>
    public sealed class AmbientOcclusionRenderer : PostProcessEffectRenderer<AmbientOcclusion>
    {
        // Indexed by (int)AmbientOcclusionMode — array order must match the enum.
        IAmbientOcclusionMethod[] m_Methods;

        public override void Init()
        {
            // Lazy one-time construction; Init may be called more than once.
            if (m_Methods == null)
            {
                m_Methods = new IAmbientOcclusionMethod[]
                {
                    new ScalableAO(settings),
                    new MultiScaleVO(settings),
                };
            }
        }

        /// <summary>
        /// True when the ambient-only path applies: requested in settings AND the
        /// camera actually renders deferred with HDR enabled.
        /// </summary>
        public bool IsAmbientOnly(PostProcessRenderContext context)
        {
            var camera = context.camera;
            return settings.ambientOnly.value
                && camera.actualRenderingPath == RenderingPath.DeferredShading
                && camera.allowHDR;
        }

        /// <summary>Returns the algorithm instance selected by the current mode.</summary>
        public IAmbientOcclusionMethod Get()
        {
            return m_Methods[(int)settings.mode.value];
        }

        public override DepthTextureMode GetCameraFlags()
        {
            // Delegates to the active algorithm's depth-texture requirements.
            return Get().GetCameraFlags();
        }

        public override void Release()
        {
            // Release both algorithms, not just the active one.
            foreach (var m in m_Methods)
                m.Release();
        }

        /// <summary>Typed accessor for the SAO implementation.</summary>
        public ScalableAO GetScalableAO()
        {
            return (ScalableAO)m_Methods[(int)AmbientOcclusionMode.ScalableAmbientObscurance];
        }

        /// <summary>Typed accessor for the MSVO implementation.</summary>
        public MultiScaleVO GetMultiScaleVO()
        {
            return (MultiScaleVO)m_Methods[(int)AmbientOcclusionMode.MultiScaleVolumetricObscurance];
        }

        // Unused
        public override void Render(PostProcessRenderContext context)
        {
        }
    }
}
157
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Eye adaptation behavior: Progressive animates toward the target exposure
    /// over time; Fixed jumps straight to it. The enum values double as shader
    /// pass indices (see the BlitFullscreenTriangle calls below).
    /// </summary>
    public enum EyeAdaptation
    {
        Progressive,
        Fixed
    }

    /// <summary>Volume-blendable override wrapper for <see cref="EyeAdaptation"/>.</summary>
    [Serializable]
    public sealed class EyeAdaptationParameter : ParameterOverride<EyeAdaptation> {}

    /// <summary>
    /// Settings for histogram-based automatic exposure.
    /// </summary>
    [Serializable]
    [PostProcess(typeof(AutoExposureRenderer), "Unity/Auto Exposure")]
    public sealed class AutoExposure : PostProcessEffectSettings
    {
        [MinMax(1f, 99f), DisplayName("Filtering (%)"), Tooltip("Filters the bright & dark part of the histogram when computing the average luminance to avoid very dark pixels & very bright pixels from contributing to the auto exposure. Unit is in percent.")]
        public Vector2Parameter filtering = new Vector2Parameter { value = new Vector2(50f, 95f) };

        [Range(LogHistogram.rangeMin, LogHistogram.rangeMax), DisplayName("Minimum (EV)"), Tooltip("Minimum average luminance to consider for auto exposure (in EV).")]
        public FloatParameter minLuminance = new FloatParameter { value = 0f };

        [Range(LogHistogram.rangeMin, LogHistogram.rangeMax), DisplayName("Maximum (EV)"), Tooltip("Maximum average luminance to consider for auto exposure (in EV).")]
        public FloatParameter maxLuminance = new FloatParameter { value = 0f };

        [Min(0f), Tooltip("Exposure bias. Use this to offset the global exposure of the scene.")]
        public FloatParameter keyValue = new FloatParameter { value = 1f };

        [DisplayName("Type"), Tooltip("Use \"Progressive\" if you want auto exposure to be animated. Use \"Fixed\" otherwise.")]
        public EyeAdaptationParameter eyeAdaptation = new EyeAdaptationParameter { value = EyeAdaptation.Progressive };

        [Min(0f), Tooltip("Adaptation speed from a dark to a light environment.")]
        public FloatParameter speedUp = new FloatParameter { value = 2f };

        [Min(0f), Tooltip("Adaptation speed from a light to a dark environment.")]
        public FloatParameter speedDown = new FloatParameter { value = 1f };

        /// <summary>
        /// Requires compute shaders (for the histogram) and RFloat render textures
        /// (the 1x1 exposure targets below).
        /// </summary>
        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            return enabled.value
                && SystemInfo.supportsComputeShaders
                && SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RFloat);
        }
    }

    /// <summary>
    /// Computes the current exposure into a 1x1 RFloat texture, double-buffered
    /// per eye so progressive adaptation can read last frame's value.
    /// </summary>
    public sealed class AutoExposureRenderer : PostProcessEffectRenderer<AutoExposure>
    {
        const int k_NumEyes = 2;                 // one texture set per XR eye
        const int k_NumAutoExposureTextures = 2; // ping-pong pair

        // [eye][pingpong index] -> 1x1 RFloat exposure texture.
        readonly RenderTexture[][] m_AutoExposurePool = new RenderTexture[k_NumEyes][];
        // Per-eye index of the texture last written.
        int[] m_AutoExposurePingPong = new int[k_NumEyes];
        RenderTexture m_CurrentAutoExposure;

        public AutoExposureRenderer()
        {
            for (int eye = 0; eye < k_NumEyes; eye++)
            {
                m_AutoExposurePool[eye] = new RenderTexture[k_NumAutoExposureTextures];
                m_AutoExposurePingPong[eye] = 0;
            }
        }

        // (Re)creates a pool texture if it's missing or its GPU resource was lost.
        void CheckTexture(int eye, int id)
        {
            if (m_AutoExposurePool[eye][id] == null || !m_AutoExposurePool[eye][id].IsCreated())
            {
                m_AutoExposurePool[eye][id] = new RenderTexture(1, 1, 0, RenderTextureFormat.RFloat);
                m_AutoExposurePool[eye][id].Create();
            }
        }

        /// <summary>
        /// Clamps the user parameters, feeds the histogram data to the auto-exposure
        /// shader and writes the resulting exposure into the current ping-pong target.
        /// Publishes the result on the context for downstream effects.
        /// </summary>
        public override void Render(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("AutoExposureLookup");

            var sheet = context.propertySheets.Get(context.resources.shaders.autoExposure);
            sheet.ClearKeywords();

            // Prepare autoExpo texture pool
            CheckTexture(context.xrActiveEye, 0);
            CheckTexture(context.xrActiveEye, 1);

            // Make sure filtering values are correct to avoid apocalyptic consequences:
            // keep low < high with at least kMinDelta between them.
            float lowPercent = settings.filtering.value.x;
            float highPercent = settings.filtering.value.y;
            const float kMinDelta = 1e-2f;
            highPercent = Mathf.Clamp(highPercent, 1f + kMinDelta, 99f);
            lowPercent = Mathf.Clamp(lowPercent, 1f, highPercent - kMinDelta);

            // Clamp min/max adaptation values as well. Note this writes the sorted
            // values back into the settings object.
            float minLum = settings.minLuminance.value;
            float maxLum = settings.maxLuminance.value;
            settings.minLuminance.value = Mathf.Min(minLum, maxLum);
            settings.maxLuminance.value = Mathf.Max(minLum, maxLum);

            // Compute auto exposure: percentiles go in as [0,1] fractions, EV limits
            // as linear multipliers (Exp2).
            sheet.properties.SetBuffer(ShaderIDs.HistogramBuffer, context.logHistogram.data);
            sheet.properties.SetVector(ShaderIDs.Params, new Vector4(lowPercent * 0.01f, highPercent * 0.01f, RuntimeUtilities.Exp2(settings.minLuminance.value), RuntimeUtilities.Exp2(settings.maxLuminance.value)));
            sheet.properties.SetVector(ShaderIDs.Speed, new Vector2(settings.speedDown.value, settings.speedUp.value));
            sheet.properties.SetVector(ShaderIDs.ScaleOffsetRes, context.logHistogram.GetHistogramScaleOffsetRes(context));
            sheet.properties.SetFloat(ShaderIDs.ExposureCompensation, settings.keyValue.value);

            if (m_ResetHistory || !Application.isPlaying)
            {
                // We don't want eye adaptation when not in play mode because the GameView isn't
                // animated, thus making it harder to tweak. Just use the final auto exposure value.
                m_CurrentAutoExposure = m_AutoExposurePool[context.xrActiveEye][0];
                cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, m_CurrentAutoExposure, sheet, (int)EyeAdaptation.Fixed);

                // Copy current exposure to the other pingpong target to avoid adapting from black
                RuntimeUtilities.CopyTexture(cmd, m_AutoExposurePool[context.xrActiveEye][0], m_AutoExposurePool[context.xrActiveEye][1]);
                m_ResetHistory = false;
            }
            else
            {
                // Ping-pong: read last frame's exposure from one texture, write the
                // new one into the other, then flip the stored index for next frame.
                int pp = m_AutoExposurePingPong[context.xrActiveEye];
                var src = m_AutoExposurePool[context.xrActiveEye][++pp % 2];
                var dst = m_AutoExposurePool[context.xrActiveEye][++pp % 2];
                cmd.BlitFullscreenTriangle(src, dst, sheet, (int)settings.eyeAdaptation.value);
                m_AutoExposurePingPong[context.xrActiveEye] = ++pp % 2;
                m_CurrentAutoExposure = dst;
            }

            cmd.EndSample("AutoExposureLookup");

            // Expose the result so other effects (e.g. Bloom prefilter) can use it.
            context.autoExposureTexture = m_CurrentAutoExposure;
            context.autoExposure = settings;
        }

        public override void Release()
        {
            // Destroy every texture in the pool, for both eyes.
            foreach (var rtEyeSet in m_AutoExposurePool)
            {
                foreach (var rt in rtEyeSet)
                    RuntimeUtilities.Destroy(rt);
            }
        }
    }
}
144
orrb
openai
C#
using System;
using UnityEngine.Serialization;

namespace UnityEngine.Rendering.PostProcessing
{
    // For now and by popular request, this bloom effect is geared toward artists so they have full
    // control over how it looks at the expense of physical correctness.
    // Eventually we will need a "true" natural bloom effect with proper energy conservation.
    [Serializable]
    [PostProcess(typeof(BloomRenderer), "Unity/Bloom")]
    public sealed class Bloom : PostProcessEffectSettings
    {
        // Effect is considered disabled while this stays at 0 (see IsEnabledAndSupported).
        [Min(0f), Tooltip("Strength of the bloom filter. Values higher than 1 will make bloom contribute more energy to the final render. Keep this under or equal to 1 if you want energy conservation.")]
        public FloatParameter intensity = new FloatParameter { value = 0f };

        [Min(0f), Tooltip("Filters out pixels under this level of brightness. Value is in gamma-space.")]
        public FloatParameter threshold = new FloatParameter { value = 1f };

        [Range(0f, 1f), Tooltip("Makes transition between under/over-threshold gradual (0 = hard threshold, 1 = soft threshold).")]
        public FloatParameter softKnee = new FloatParameter { value = 0.5f };

        // Drives the pyramid iteration count in BloomRenderer.Render.
        [Range(1f, 10f), Tooltip("Changes the extent of veiling effects. For maximum quality stick to integer values. Because this value changes the internal iteration count, animating it isn't recommended as it may introduce small hiccups in the perceived radius.")]
        public FloatParameter diffusion = new FloatParameter { value = 7f };

        [Range(-1f, 1f), Tooltip("Distorts the bloom to give an anamorphic look. Negative values distort vertically, positive values distort horizontally.")]
        public FloatParameter anamorphicRatio = new FloatParameter { value = 0f };

#if UNITY_2018_1_OR_NEWER
        [ColorUsage(false, true), Tooltip("Global tint of the bloom filter.")]
#else
        [ColorUsage(false, true, 0f, 8f, 0.125f, 3f), Tooltip("Global tint of the bloom filter.")]
#endif
        public ColorParameter color = new ColorParameter { value = Color.white };

        // Was serialized as "mobileOptimized" in older versions; selects the
        // cheaper 4-tap shader passes (see qualityOffset in the renderer).
        [FormerlySerializedAs("mobileOptimized")]
        [Tooltip("Boost performances by lowering the effect quality. This settings is meant to be used on mobile and other low-end platforms but can also provide a nice performance boost on desktops and consoles.")]
        public BoolParameter fastMode = new BoolParameter { value = false };

        [Tooltip("Dirtiness texture to add smudges or dust to the bloom effect."), DisplayName("Texture")]
        public TextureParameter dirtTexture = new TextureParameter { value = null };

        [Min(0f), Tooltip("Amount of dirtiness."), DisplayName("Intensity")]
        public FloatParameter dirtIntensity = new FloatParameter { value = 0f };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            return enabled.value
                && intensity.value > 0f;
        }
    }

    /// <summary>
    /// Renders bloom via a classic downsample/upsample mip pyramid, then hands the
    /// result (plus dirt/tint settings) to the uber shader through the context.
    /// </summary>
    public sealed class BloomRenderer : PostProcessEffectRenderer<Bloom>
    {
        // Shader pass indices. The 13/4 pairs are full-quality vs fastMode variants;
        // "+ qualityOffset" in Render relies on this exact ordering.
        enum Pass
        {
            Prefilter13,
            Prefilter4,
            Downsample13,
            Downsample4,
            UpsampleTent,
            UpsampleBox,
            DebugOverlayThreshold,
            DebugOverlayTent,
            DebugOverlayBox
        }

        // [down,up]
        Level[] m_Pyramid;
        const int k_MaxPyramidSize = 16; // Just to make sure we handle 64k screens... Future-proof!

        // Shader property IDs for one pyramid level (downsample and upsample targets).
        struct Level
        {
            internal int down;
            internal int up;
        }

        public override void Init()
        {
            // Pre-compute the property IDs for every possible pyramid level once.
            m_Pyramid = new Level[k_MaxPyramidSize];

            for (int i = 0; i < k_MaxPyramidSize; i++)
            {
                m_Pyramid[i] = new Level
                {
                    down = Shader.PropertyToID("_BloomMipDown" + i),
                    up = Shader.PropertyToID("_BloomMipUp" + i)
                };
            }
        }

        /// <summary>
        /// Builds the bloom pyramid: prefilter + progressive downsample, then tent/box
        /// upsample back up, finally publishes the result and dirt parameters to the
        /// uber sheet. Temporary RTs are released except the final output
        /// (context.bloomBufferNameID), which downstream code is expected to release.
        /// </summary>
        public override void Render(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("BloomPyramid");

            var sheet = context.propertySheets.Get(context.resources.shaders.bloom);

            // Apply auto exposure adjustment in the prefiltering pass
            sheet.properties.SetTexture(ShaderIDs.AutoExposureTex, context.autoExposureTexture);

            // Negative anamorphic ratio values distort vertically - positive is horizontal
            float ratio = Mathf.Clamp(settings.anamorphicRatio, -1, 1);
            float rw = ratio < 0 ? -ratio : 0f;
            float rh = ratio > 0 ?  ratio : 0f;

            // Do bloom on a half-res buffer, full-res doesn't bring much and kills performances on
            // fillrate limited platforms
            int tw = Mathf.FloorToInt(context.screenWidth / (2f - rw));
            int th = Mathf.FloorToInt(context.screenHeight / (2f - rh));

            // Determine the iteration count: log2 of the larger dimension, biased by
            // the diffusion setting; fractional part becomes the sample scale.
            int s = Mathf.Max(tw, th);
            float logs = Mathf.Log(s, 2f) + Mathf.Min(settings.diffusion.value, 10f) - 10f;
            int logs_i = Mathf.FloorToInt(logs);
            int iterations = Mathf.Clamp(logs_i, 1, k_MaxPyramidSize);
            float sampleScale = 0.5f + logs - logs_i;
            sheet.properties.SetFloat(ShaderIDs.SampleScale, sampleScale);

            // Prefiltering parameters (threshold converted to linear space; knee
            // vector encodes the soft-threshold curve for the shader)
            float lthresh = Mathf.GammaToLinearSpace(settings.threshold.value);
            float knee = lthresh * settings.softKnee.value + 1e-5f;
            var threshold = new Vector4(lthresh, lthresh - knee, knee * 2f, 0.25f / knee);
            sheet.properties.SetVector(ShaderIDs.Threshold, threshold);

            // fastMode selects the cheaper 4-tap pass variants (see Pass enum order).
            int qualityOffset = settings.fastMode ? 1 : 0;

            // Downsample
            var lastDown = context.source;
            for (int i = 0; i < iterations; i++)
            {
                int mipDown = m_Pyramid[i].down;
                int mipUp = m_Pyramid[i].up;
                int pass = i == 0
                    ? (int)Pass.Prefilter13 + qualityOffset
                    : (int)Pass.Downsample13 + qualityOffset;

                context.GetScreenSpaceTemporaryRT(cmd, mipDown, 0, context.sourceFormat, RenderTextureReadWrite.Default, FilterMode.Bilinear, tw, th);
                context.GetScreenSpaceTemporaryRT(cmd, mipUp, 0, context.sourceFormat, RenderTextureReadWrite.Default, FilterMode.Bilinear, tw, th);
                cmd.BlitFullscreenTriangle(lastDown, mipDown, sheet, pass);

                lastDown = mipDown;
                tw = Mathf.Max(tw / 2, 1);
                th = Mathf.Max(th / 2, 1);
            }

            // Upsample from the smallest down-mip, combining with each down-mip on the way up.
            int lastUp = m_Pyramid[iterations - 1].down;
            for (int i = iterations - 2; i >= 0; i--)
            {
                int mipDown = m_Pyramid[i].down;
                int mipUp = m_Pyramid[i].up;
                cmd.SetGlobalTexture(ShaderIDs.BloomTex, mipDown);
                cmd.BlitFullscreenTriangle(lastUp, mipUp, sheet, (int)Pass.UpsampleTent + qualityOffset);
                lastUp = mipUp;
            }

            var linearColor = settings.color.value.linear;
            float intensity = RuntimeUtilities.Exp2(settings.intensity.value / 10f) - 1f;
            var shaderSettings = new Vector4(sampleScale, intensity, settings.dirtIntensity.value, iterations);

            // Debug overlays
            if (context.IsDebugOverlayEnabled(DebugOverlay.BloomThreshold))
            {
                context.PushDebugOverlay(cmd, context.source, sheet, (int)Pass.DebugOverlayThreshold);
            }
            else if (context.IsDebugOverlayEnabled(DebugOverlay.BloomBuffer))
            {
                sheet.properties.SetVector(ShaderIDs.ColorIntensity, new Vector4(linearColor.r, linearColor.g, linearColor.b, intensity));
                context.PushDebugOverlay(cmd, m_Pyramid[0].up, sheet, (int)Pass.DebugOverlayTent + qualityOffset);
            }

            // Lens dirtiness
            // Keep the aspect ratio correct & center the dirt texture, we don't want it to be
            // stretched or squashed
            var dirtTexture = settings.dirtTexture.value == null
                ? RuntimeUtilities.blackTexture
                : settings.dirtTexture.value;

            var dirtRatio = (float)dirtTexture.width / (float)dirtTexture.height;
            var screenRatio = (float)context.screenWidth / (float)context.screenHeight;
            var dirtTileOffset = new Vector4(1f, 1f, 0f, 0f);

            if (dirtRatio > screenRatio)
            {
                dirtTileOffset.x = screenRatio / dirtRatio;
                dirtTileOffset.z = (1f - dirtTileOffset.x) * 0.5f;
            }
            else if (screenRatio > dirtRatio)
            {
                dirtTileOffset.y = dirtRatio / screenRatio;
                dirtTileOffset.w = (1f - dirtTileOffset.y) * 0.5f;
            }

            // Shader properties
            var uberSheet = context.uberSheet;
            uberSheet.EnableKeyword("BLOOM");
            uberSheet.properties.SetVector(ShaderIDs.Bloom_DirtTileOffset, dirtTileOffset);
            uberSheet.properties.SetVector(ShaderIDs.Bloom_Settings, shaderSettings);
            uberSheet.properties.SetColor(ShaderIDs.Bloom_Color, linearColor);
            uberSheet.properties.SetTexture(ShaderIDs.Bloom_DirtTex, dirtTexture);
            cmd.SetGlobalTexture(ShaderIDs.BloomTex, lastUp);

            // Cleanup — release every temp RT except the final result (lastUp).
            for (int i = 0; i < iterations; i++)
            {
                if (m_Pyramid[i].down != lastUp)
                    cmd.ReleaseTemporaryRT(m_Pyramid[i].down);
                if (m_Pyramid[i].up != lastUp)
                    cmd.ReleaseTemporaryRT(m_Pyramid[i].up);
            }

            cmd.EndSample("BloomPyramid");

            context.bloomBufferNameID = lastUp;
        }
    }
}
219
orrb
openai
C#
using System;
using UnityEngine.Serialization;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Settings for the Chromatic Aberration effect (applied in the uber shader).
    /// </summary>
    [Serializable]
    [PostProcess(typeof(ChromaticAberrationRenderer), "Unity/Chromatic Aberration")]
    public sealed class ChromaticAberration : PostProcessEffectSettings
    {
        // Optional user-provided lut; when null a default 3-texel RGB lut is
        // generated by the renderer.
        [Tooltip("Shift the hue of chromatic aberrations.")]
        public TextureParameter spectralLut = new TextureParameter { value = null };

        // Effect is considered disabled while this stays at 0 (see IsEnabledAndSupported).
        [Range(0f, 1f), Tooltip("Amount of tangential distortion.")]
        public FloatParameter intensity = new FloatParameter { value = 0f };

        // Was serialized as "mobileOptimized" in older versions; selects the
        // cheaper shader keyword below.
        [FormerlySerializedAs("mobileOptimized")]
        [Tooltip("Boost performances by lowering the effect quality. This settings is meant to be used on mobile and other low-end platforms but can also provide a nice performance boost on desktops and consoles.")]
        public BoolParameter fastMode = new BoolParameter { value = false };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            return enabled.value
                && intensity.value > 0f;
        }
    }

    /// <summary>
    /// Configures the uber shader for chromatic aberration; owns a fallback
    /// spectral lut used when the user doesn't provide one.
    /// </summary>
    public sealed class ChromaticAberrationRenderer : PostProcessEffectRenderer<ChromaticAberration>
    {
        // Lazily-created default lut (pure red/green/blue texels); destroyed in Release.
        Texture2D m_InternalSpectralLut;

        /// <summary>
        /// Picks the spectral lut (user's or internal fallback) and pushes the
        /// keyword, amount and lut to the uber sheet.
        /// </summary>
        public override void Render(PostProcessRenderContext context)
        {
            var spectralLut = settings.spectralLut.value;

            if (spectralLut == null)
            {
                // Build the default 3x1 R/G/B ramp once; bilinear + clamp so the
                // shader can sample it continuously.
                if (m_InternalSpectralLut == null)
                {
                    m_InternalSpectralLut = new Texture2D(3, 1, TextureFormat.RGB24, false)
                    {
                        name = "Chromatic Aberration Spectrum Lookup",
                        filterMode = FilterMode.Bilinear,
                        wrapMode = TextureWrapMode.Clamp,
                        anisoLevel = 0,
                        hideFlags = HideFlags.DontSave
                    };

                    m_InternalSpectralLut.SetPixels(new []
                    {
                        new Color(1f, 0f, 0f),
                        new Color(0f, 1f, 0f),
                        new Color(0f, 0f, 1f)
                    });

                    m_InternalSpectralLut.Apply();
                }

                spectralLut = m_InternalSpectralLut;
            }

            var sheet = context.uberSheet;
            // fastMode switches to the cheaper shader variant.
            sheet.EnableKeyword(settings.fastMode
                ? "CHROMATIC_ABERRATION_LOW"
                : "CHROMATIC_ABERRATION"
            );
            // 0.05 maps the [0,1] UI intensity to the shader's distortion amount.
            sheet.properties.SetFloat(ShaderIDs.ChromaticAberration_Amount, settings.intensity * 0.05f);
            sheet.properties.SetTexture(ShaderIDs.ChromaticAberration_SpectralLut, spectralLut);
        }

        public override void Release()
        {
            // Destroy the internal lut (safe if it was never created).
            RuntimeUtilities.Destroy(m_InternalSpectralLut);
            m_InternalSpectralLut = null;
        }
    }
}
77
orrb
openai
C#
using System; namespace UnityEngine.Rendering.PostProcessing { public enum GradingMode { LowDefinitionRange, HighDefinitionRange, External } public enum Tonemapper { None, // Neutral tonemapper (based off John Hable's & Jim Hejl's work) Neutral, // ACES Filmic reference tonemapper (custom approximation) ACES, // Custom artist-friendly curve Custom } [Serializable] public sealed class GradingModeParameter : ParameterOverride<GradingMode> {} [Serializable] public sealed class TonemapperParameter : ParameterOverride<Tonemapper> {} // TODO: Could use some refactoring, too much duplicated code here [Serializable] [PostProcess(typeof(ColorGradingRenderer), "Unity/Color Grading")] public sealed class ColorGrading : PostProcessEffectSettings { [DisplayName("Mode"), Tooltip("Select a color grading mode that fits your dynamic range and workflow. Use HDR if your camera is set to render in HDR and your target platform supports it. Use LDR for low-end mobiles or devices that don't support HDR. Use External if you prefer authoring a Log LUT in external softwares.")] public GradingModeParameter gradingMode = new GradingModeParameter { value = GradingMode.HighDefinitionRange }; [DisplayName("Lookup Texture"), Tooltip("")] public TextureParameter externalLut = new TextureParameter { value = null }; [DisplayName("Mode"), Tooltip("Select a tonemapping algorithm to use at the end of the color grading process.")] public TonemapperParameter tonemapper = new TonemapperParameter { value = Tonemapper.None }; [DisplayName("Toe Strength"), Range(0f, 1f), Tooltip("Affects the transition between the toe and the mid section of the curve. A value of 0 means no toe, a value of 1 means a very hard transition.")] public FloatParameter toneCurveToeStrength = new FloatParameter { value = 0f }; [DisplayName("Toe Length"), Range(0f, 1f), Tooltip("Affects how much of the dynamic range is in the toe. 
With a small value, the toe will be very short and quickly transition into the linear section, and with a longer value having a longer toe.")] public FloatParameter toneCurveToeLength = new FloatParameter { value = 0.5f }; [DisplayName("Shoulder Strength"), Range(0f, 1f), Tooltip("Affects the transition between the mid section and the shoulder of the curve. A value of 0 means no shoulder, a value of 1 means a very hard transition.")] public FloatParameter toneCurveShoulderStrength = new FloatParameter { value = 0f }; [DisplayName("Shoulder Length"), Min(0f), Tooltip("Affects how many F-stops (EV) to add to the dynamic range of the curve.")] public FloatParameter toneCurveShoulderLength = new FloatParameter { value = 0.5f }; [DisplayName("Shoulder Angle"), Range(0f, 1f), Tooltip("Affects how much overshoot to add to the shoulder.")] public FloatParameter toneCurveShoulderAngle = new FloatParameter { value = 0f }; [DisplayName("Gamma"), Min(0.001f), Tooltip("")] public FloatParameter toneCurveGamma = new FloatParameter { value = 1f }; [DisplayName("Lookup Texture"), Tooltip("Custom log-space lookup texture (strip format, e.g. 1024x32). EXR format is highly recommended or precision will be heavily degraded. Refer to the documentation for more information about how to create such a Lut.")] public TextureParameter logLut = new TextureParameter { value = null }; [DisplayName("Lookup Texture"), Tooltip("Custom lookup texture (strip format, e.g. 256x16) to apply before the rest of the color grading operators. 
If none is provided, a neutral one will be generated internally.")] public TextureParameter ldrLut = new TextureParameter { value = null }; // LDR only [DisplayName("Temperature"), Range(-100f, 100f), Tooltip("Sets the white balance to a custom color temperature.")] public FloatParameter temperature = new FloatParameter { value = 0f }; [DisplayName("Tint"), Range(-100f, 100f), Tooltip("Sets the white balance to compensate for a green or magenta tint.")] public FloatParameter tint = new FloatParameter { value = 0f }; #if UNITY_2018_1_OR_NEWER [DisplayName("Color Filter"), ColorUsage(false, true), Tooltip("Tint the render by multiplying a color.")] #else [DisplayName("Color Filter"), ColorUsage(false, true, 0f, 8f, 0.125f, 3f), Tooltip("Tint the render by multiplying a color.")] #endif public ColorParameter colorFilter = new ColorParameter { value = Color.white }; [DisplayName("Hue Shift"), Range(-180f, 180f), Tooltip("Shift the hue of all colors.")] public FloatParameter hueShift = new FloatParameter { value = 0f }; [DisplayName("Saturation"), Range(-100f, 100f), Tooltip("Pushes the intensity of all colors.")] public FloatParameter saturation = new FloatParameter { value = 0f }; [DisplayName("Brightness"), Range(-100f, 100f), Tooltip("Makes the image brighter or darker.")] public FloatParameter brightness = new FloatParameter { value = 0f }; // LDR only [DisplayName("Post-exposure (EV)"), Tooltip("Adjusts the overall exposure of the scene in EV units. 
This is applied after HDR effect and right before tonemapping so it won't affect previous effects in the chain.")] public FloatParameter postExposure = new FloatParameter { value = 0f }; // HDR only [DisplayName("Contrast"), Range(-100f, 100f), Tooltip("Expands or shrinks the overall range of tonal values.")] public FloatParameter contrast = new FloatParameter { value = 0f }; [DisplayName("Red"), Range(-200f, 200f), Tooltip("Modify influence of the red channel in the overall mix.")] public FloatParameter mixerRedOutRedIn = new FloatParameter { value = 100f }; [DisplayName("Green"), Range(-200f, 200f), Tooltip("Modify influence of the green channel in the overall mix.")] public FloatParameter mixerRedOutGreenIn = new FloatParameter { value = 0f }; [DisplayName("Blue"), Range(-200f, 200f), Tooltip("Modify influence of the blue channel in the overall mix.")] public FloatParameter mixerRedOutBlueIn = new FloatParameter { value = 0f }; [DisplayName("Red"), Range(-200f, 200f), Tooltip("Modify influence of the red channel in the overall mix.")] public FloatParameter mixerGreenOutRedIn = new FloatParameter { value = 0f }; [DisplayName("Green"), Range(-200f, 200f), Tooltip("Modify influence of the green channel in the overall mix.")] public FloatParameter mixerGreenOutGreenIn = new FloatParameter { value = 100f }; [DisplayName("Blue"), Range(-200f, 200f), Tooltip("Modify influence of the blue channel in the overall mix.")] public FloatParameter mixerGreenOutBlueIn = new FloatParameter { value = 0f }; [DisplayName("Red"), Range(-200f, 200f), Tooltip("Modify influence of the red channel in the overall mix.")] public FloatParameter mixerBlueOutRedIn = new FloatParameter { value = 0f }; [DisplayName("Green"), Range(-200f, 200f), Tooltip("Modify influence of the green channel in the overall mix.")] public FloatParameter mixerBlueOutGreenIn = new FloatParameter { value = 0f }; [DisplayName("Blue"), Range(-200f, 200f), Tooltip("Modify influence of the blue channel in the overall 
mix.")] public FloatParameter mixerBlueOutBlueIn = new FloatParameter { value = 100f }; [DisplayName("Lift"), Tooltip("Controls the darkest portions of the render."), Trackball(TrackballAttribute.Mode.Lift)] public Vector4Parameter lift = new Vector4Parameter { value = new Vector4(1f, 1f, 1f, 0f) }; [DisplayName("Gamma"), Tooltip("Power function that controls midrange tones."), Trackball(TrackballAttribute.Mode.Gamma)] public Vector4Parameter gamma = new Vector4Parameter { value = new Vector4(1f, 1f, 1f, 0f) }; [DisplayName("Gain"), Tooltip("Controls the lightest portions of the render."), Trackball(TrackballAttribute.Mode.Gain)] public Vector4Parameter gain = new Vector4Parameter { value = new Vector4(1f, 1f, 1f, 0f) }; public SplineParameter masterCurve = new SplineParameter { value = new Spline(new AnimationCurve(new Keyframe(0f, 0f, 1f, 1f), new Keyframe(1f, 1f, 1f, 1f)), 0f, false, new Vector2(0f, 1f)) }; public SplineParameter redCurve = new SplineParameter { value = new Spline(new AnimationCurve(new Keyframe(0f, 0f, 1f, 1f), new Keyframe(1f, 1f, 1f, 1f)), 0f, false, new Vector2(0f, 1f)) }; public SplineParameter greenCurve = new SplineParameter { value = new Spline(new AnimationCurve(new Keyframe(0f, 0f, 1f, 1f), new Keyframe(1f, 1f, 1f, 1f)), 0f, false, new Vector2(0f, 1f)) }; public SplineParameter blueCurve = new SplineParameter { value = new Spline(new AnimationCurve(new Keyframe(0f, 0f, 1f, 1f), new Keyframe(1f, 1f, 1f, 1f)), 0f, false, new Vector2(0f, 1f)) }; public SplineParameter hueVsHueCurve = new SplineParameter { value = new Spline(new AnimationCurve(), 0.5f, true, new Vector2(0f, 1f)) }; public SplineParameter hueVsSatCurve = new SplineParameter { value = new Spline(new AnimationCurve(), 0.5f, true, new Vector2(0f, 1f)) }; public SplineParameter satVsSatCurve = new SplineParameter { value = new Spline(new AnimationCurve(), 0.5f, false, new Vector2(0f, 1f)) }; public SplineParameter lumVsSatCurve = new SplineParameter { value = new Spline(new 
AnimationCurve(), 0.5f, false, new Vector2(0f, 1f)) };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            // External grading blends an authored 3D lut on the GPU; it needs Tex3D +
            // compute support, otherwise the effect is reported as unsupported.
            if (gradingMode.value == GradingMode.External)
            {
                if (!SystemInfo.supports3DRenderTextures || !SystemInfo.supportsComputeShaders)
                    return false;
            }

            return enabled.value;
        }
    }

    // Renderer for the ColorGrading settings above. Picks one of four lut-generation
    // pipelines per frame depending on the grading mode and platform capabilities.
    public sealed class ColorGradingRenderer : PostProcessEffectRenderer<ColorGrading>
    {
        enum Pass
        {
            LutGenLDRFromScratch,
            LutGenLDR,
            LutGenHDR2D
        }

        Texture2D m_GradingCurves;
        readonly Color[] m_Pixels = new Color[Spline.k_Precision * 2]; // Avoids GC stress

        RenderTexture m_InternalLdrLut;
        RenderTexture m_InternalLogLut;
        const int k_Lut2DSize = 32; // 2D strip lut (32 tiles of 32x32)
        const int k_Lut3DSize = 33; // 3D lut edge size

        readonly HableCurve m_HableCurve = new HableCurve();

        public override void Render(PostProcessRenderContext context)
        {
            var gradingMode = settings.gradingMode.value;
            // NOTE(review): OpenGLCore is excluded from the compute 3D path here even when it
            // reports compute support — presumably a driver workaround; confirm before changing.
            var supportComputeTex3D = SystemInfo.supports3DRenderTextures &&
                SystemInfo.supportsComputeShaders &&
                SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLCore;

            if (gradingMode == GradingMode.External)
                RenderExternalPipeline3D(context);
            else if (gradingMode == GradingMode.HighDefinitionRange && supportComputeTex3D)
                RenderHDRPipeline3D(context);
            else if (gradingMode == GradingMode.HighDefinitionRange)
                RenderHDRPipeline2D(context);
            else
                RenderLDRPipeline2D(context);
        }

        // Do color grading using an externally authored 3D lut; it requires Texture3D support and
        // compute shaders in case blending is required - Desktop / Consoles / Some high-end mobiles
        void RenderExternalPipeline3D(PostProcessRenderContext context)
        {
            var lut = settings.externalLut.value;

            if (lut == null)
                return;

            var uberSheet = context.uberSheet;
            uberSheet.EnableKeyword("COLOR_GRADING_HDR_3D");
            uberSheet.properties.SetTexture(ShaderIDs.Lut3D, lut);
            // (1/size, size-1): scale & offset used by the shader to sample the lut
            uberSheet.properties.SetVector(ShaderIDs.Lut3D_Params, new Vector2(1f / lut.width, lut.width - 1f));
            uberSheet.properties.SetFloat(ShaderIDs.PostExposure, RuntimeUtilities.Exp2(settings.postExposure.value));
            context.logLut = lut;
        }

        // HDR color pipeline is rendered to a 3D lut; it requires Texture3D & compute shaders
        // support - Desktop / Consoles / Some high-end mobiles
        // TODO: Use ShaderIDs for compute once the compatible APIs go in
        void RenderHDRPipeline3D(PostProcessRenderContext context)
        {
            // Unfortunately because AnimationCurve doesn't implement GetHashCode and we don't have
            // any reliable way to figure out if a curve data is different from another one we can't
            // skip regenerating the Lut if nothing has changed. So it has to be done on every
            // frame...
            // It's not a very expensive operation anyway (we're talking about filling a 33x33x33
            // Lut on the GPU) but every little thing helps, especially on mobile.
            {
                CheckInternalLogLut();

                // Lut setup
                var compute = context.resources.computeShaders.lut3DBaker;
                int kernel = 0;

                switch (settings.tonemapper.value)
                {
                    case Tonemapper.None: kernel = compute.FindKernel("KGenLut3D_NoTonemap");
                        break;
                    case Tonemapper.Neutral: kernel = compute.FindKernel("KGenLut3D_NeutralTonemap");
                        break;
                    case Tonemapper.ACES: kernel = compute.FindKernel("KGenLut3D_AcesTonemap");
                        break;
                    case Tonemapper.Custom: kernel = compute.FindKernel("KGenLut3D_CustomTonemap");
                        break;
                }

                // 8x8 thread groups; Android GL gets a smaller Z group size
                int groupSizeXY = Mathf.CeilToInt(k_Lut3DSize / 8f);
                int groupSizeZ = Mathf.CeilToInt(k_Lut3DSize / (RuntimeUtilities.isAndroidOpenGL ? 2f : 8f));

                var cmd = context.command;
                cmd.SetComputeTextureParam(compute, kernel, "_Output", m_InternalLogLut);
                cmd.SetComputeVectorParam(compute, "_Size", new Vector4(k_Lut3DSize, 1f / (k_Lut3DSize - 1f), 0f, 0f));

                var colorBalance = ColorUtilities.ComputeColorBalance(settings.temperature.value, settings.tint.value);
                cmd.SetComputeVectorParam(compute, "_ColorBalance", colorBalance);
                cmd.SetComputeVectorParam(compute, "_ColorFilter", settings.colorFilter.value);

                float hue = settings.hueShift.value / 360f;         // Remap to [-0.5;0.5]
                float sat = settings.saturation.value / 100f + 1f;  // Remap to [0;2]
                float con = settings.contrast.value / 100f + 1f;    // Remap to [0;2]
                cmd.SetComputeVectorParam(compute, "_HueSatCon", new Vector4(hue, sat, con, 0f));

                var channelMixerR = new Vector4(settings.mixerRedOutRedIn, settings.mixerRedOutGreenIn, settings.mixerRedOutBlueIn, 0f);
                var channelMixerG = new Vector4(settings.mixerGreenOutRedIn, settings.mixerGreenOutGreenIn, settings.mixerGreenOutBlueIn, 0f);
                var channelMixerB = new Vector4(settings.mixerBlueOutRedIn, settings.mixerBlueOutGreenIn, settings.mixerBlueOutBlueIn, 0f);
                cmd.SetComputeVectorParam(compute, "_ChannelMixerRed", channelMixerR / 100f); // Remap to [-2;2]
                cmd.SetComputeVectorParam(compute, "_ChannelMixerGreen", channelMixerG / 100f);
                cmd.SetComputeVectorParam(compute, "_ChannelMixerBlue", channelMixerB / 100f);

                // HDR path scales lift/gain/gamma down (0.2/0.8) before conversion;
                // the LDR path below uses the raw values.
                var lift = ColorUtilities.ColorToLift(settings.lift.value * 0.2f);
                var gain = ColorUtilities.ColorToGain(settings.gain.value * 0.8f);
                var invgamma = ColorUtilities.ColorToInverseGamma(settings.gamma.value * 0.8f);
                cmd.SetComputeVectorParam(compute, "_Lift", new Vector4(lift.x, lift.y, lift.z, 0f));
                cmd.SetComputeVectorParam(compute, "_InvGamma", new Vector4(invgamma.x, invgamma.y, invgamma.z, 0f));
                cmd.SetComputeVectorParam(compute, "_Gain", new Vector4(gain.x, gain.y, gain.z, 0f));

                cmd.SetComputeTextureParam(compute, kernel, "_Curves", GetCurveTexture(true));

                if (settings.tonemapper.value == Tonemapper.Custom)
                {
                    m_HableCurve.Init(
                        settings.toneCurveToeStrength.value,
                        settings.toneCurveToeLength.value,
                        settings.toneCurveShoulderStrength.value,
                        settings.toneCurveShoulderLength.value,
                        settings.toneCurveShoulderAngle.value,
                        settings.toneCurveGamma.value
                    );

                    cmd.SetComputeVectorParam(compute, "_CustomToneCurve", m_HableCurve.uniforms.curve);
                    cmd.SetComputeVectorParam(compute, "_ToeSegmentA", m_HableCurve.uniforms.toeSegmentA);
                    cmd.SetComputeVectorParam(compute, "_ToeSegmentB", m_HableCurve.uniforms.toeSegmentB);
                    cmd.SetComputeVectorParam(compute, "_MidSegmentA", m_HableCurve.uniforms.midSegmentA);
                    cmd.SetComputeVectorParam(compute, "_MidSegmentB", m_HableCurve.uniforms.midSegmentB);
                    cmd.SetComputeVectorParam(compute, "_ShoSegmentA", m_HableCurve.uniforms.shoSegmentA);
                    cmd.SetComputeVectorParam(compute, "_ShoSegmentB", m_HableCurve.uniforms.shoSegmentB);
                }

                // Generate the lut
                context.command.BeginSample("HdrColorGradingLut3D");
                cmd.DispatchCompute(compute, kernel, groupSizeXY, groupSizeXY, groupSizeZ);
                context.command.EndSample("HdrColorGradingLut3D");
            }

            var lut = m_InternalLogLut;
            var uberSheet = context.uberSheet;
            uberSheet.EnableKeyword("COLOR_GRADING_HDR_3D");
            uberSheet.properties.SetTexture(ShaderIDs.Lut3D, lut);
            uberSheet.properties.SetVector(ShaderIDs.Lut3D_Params, new Vector2(1f / lut.width, lut.width - 1f));
            uberSheet.properties.SetFloat(ShaderIDs.PostExposure, RuntimeUtilities.Exp2(settings.postExposure.value));
            context.logLut = lut;
        }

        // HDR color pipeline is rendered to a 2D strip lut (works on HDR platforms without compute
        // and 3D texture support). Precision is sliiiiiiightly lower than when using a 3D texture
        // LUT (33^3 -> 32^3) but most of the time it's imperceptible.
        void RenderHDRPipeline2D(PostProcessRenderContext context)
        {
            // For the same reasons as in RenderHDRPipeline3D, regen LUT on evey frame
            {
                CheckInternalStripLut();

                // Lut setup
                var lutSheet = context.propertySheets.Get(context.resources.shaders.lut2DBaker);
                lutSheet.ClearKeywords();

                lutSheet.properties.SetVector(ShaderIDs.Lut2D_Params, new Vector4(k_Lut2DSize, 0.5f / (k_Lut2DSize * k_Lut2DSize), 0.5f / k_Lut2DSize, k_Lut2DSize / (k_Lut2DSize - 1f)));

                var colorBalance = ColorUtilities.ComputeColorBalance(settings.temperature.value, settings.tint.value);
                lutSheet.properties.SetVector(ShaderIDs.ColorBalance, colorBalance);
                lutSheet.properties.SetVector(ShaderIDs.ColorFilter, settings.colorFilter.value);

                float hue = settings.hueShift.value / 360f;         // Remap to [-0.5;0.5]
                float sat = settings.saturation.value / 100f + 1f;  // Remap to [0;2]
                float con = settings.contrast.value / 100f + 1f;    // Remap to [0;2]
                lutSheet.properties.SetVector(ShaderIDs.HueSatCon, new Vector3(hue, sat, con));

                var channelMixerR = new Vector3(settings.mixerRedOutRedIn, settings.mixerRedOutGreenIn, settings.mixerRedOutBlueIn);
                var channelMixerG = new Vector3(settings.mixerGreenOutRedIn, settings.mixerGreenOutGreenIn, settings.mixerGreenOutBlueIn);
                var channelMixerB = new Vector3(settings.mixerBlueOutRedIn, settings.mixerBlueOutGreenIn, settings.mixerBlueOutBlueIn);
                lutSheet.properties.SetVector(ShaderIDs.ChannelMixerRed, channelMixerR / 100f); // Remap to [-2;2]
                lutSheet.properties.SetVector(ShaderIDs.ChannelMixerGreen, channelMixerG / 100f);
                lutSheet.properties.SetVector(ShaderIDs.ChannelMixerBlue, channelMixerB / 100f);

                // Same HDR scaling of lift/gain/gamma as the 3D pipeline
                var lift = ColorUtilities.ColorToLift(settings.lift.value * 0.2f);
                var gain = ColorUtilities.ColorToGain(settings.gain.value * 0.8f);
                var invgamma = ColorUtilities.ColorToInverseGamma(settings.gamma.value * 0.8f);
                lutSheet.properties.SetVector(ShaderIDs.Lift, lift);
                lutSheet.properties.SetVector(ShaderIDs.InvGamma, invgamma);
                lutSheet.properties.SetVector(ShaderIDs.Gain, gain);

                lutSheet.properties.SetTexture(ShaderIDs.Curves, GetCurveTexture(false));

                var tonemapper = settings.tonemapper.value;

                if (tonemapper == Tonemapper.Custom)
                {
                    lutSheet.EnableKeyword("TONEMAPPING_CUSTOM");

                    m_HableCurve.Init(
                        settings.toneCurveToeStrength.value,
                        settings.toneCurveToeLength.value,
                        settings.toneCurveShoulderStrength.value,
                        settings.toneCurveShoulderLength.value,
                        settings.toneCurveShoulderAngle.value,
                        settings.toneCurveGamma.value
                    );

                    lutSheet.properties.SetVector(ShaderIDs.CustomToneCurve, m_HableCurve.uniforms.curve);
                    lutSheet.properties.SetVector(ShaderIDs.ToeSegmentA, m_HableCurve.uniforms.toeSegmentA);
                    lutSheet.properties.SetVector(ShaderIDs.ToeSegmentB, m_HableCurve.uniforms.toeSegmentB);
                    lutSheet.properties.SetVector(ShaderIDs.MidSegmentA, m_HableCurve.uniforms.midSegmentA);
                    lutSheet.properties.SetVector(ShaderIDs.MidSegmentB, m_HableCurve.uniforms.midSegmentB);
                    lutSheet.properties.SetVector(ShaderIDs.ShoSegmentA, m_HableCurve.uniforms.shoSegmentA);
                    lutSheet.properties.SetVector(ShaderIDs.ShoSegmentB, m_HableCurve.uniforms.shoSegmentB);
                }
                else if (tonemapper == Tonemapper.ACES)
                    lutSheet.EnableKeyword("TONEMAPPING_ACES");
                else if (tonemapper == Tonemapper.Neutral)
                    lutSheet.EnableKeyword("TONEMAPPING_NEUTRAL");

                // Generate the lut
                context.command.BeginSample("HdrColorGradingLut2D");
                context.command.BlitFullscreenTriangle(BuiltinRenderTextureType.None, m_InternalLdrLut, lutSheet, (int)Pass.LutGenHDR2D);
                context.command.EndSample("HdrColorGradingLut2D");
            }

            var lut = m_InternalLdrLut;
            var uberSheet = context.uberSheet;
            uberSheet.EnableKeyword("COLOR_GRADING_HDR_2D");
            uberSheet.properties.SetVector(ShaderIDs.Lut2D_Params, new Vector3(1f / lut.width, 1f / lut.height, lut.height - 1f));
            uberSheet.properties.SetTexture(ShaderIDs.Lut2D, lut);
            uberSheet.properties.SetFloat(ShaderIDs.PostExposure, RuntimeUtilities.Exp2(settings.postExposure.value));
        }

        // LDR color pipeline is rendered to a 2D strip lut (works on every platform)
        void RenderLDRPipeline2D(PostProcessRenderContext context)
        {
            // For the same reasons as in RenderHDRPipeline3D, regen LUT on evey frame
            {
                CheckInternalStripLut();

                // Lut setup
                var lutSheet = context.propertySheets.Get(context.resources.shaders.lut2DBaker);
                lutSheet.ClearKeywords();

                lutSheet.properties.SetVector(ShaderIDs.Lut2D_Params, new Vector4(k_Lut2DSize, 0.5f / (k_Lut2DSize * k_Lut2DSize), 0.5f / k_Lut2DSize, k_Lut2DSize / (k_Lut2DSize - 1f)));

                var colorBalance = ColorUtilities.ComputeColorBalance(settings.temperature.value, settings.tint.value);
                lutSheet.properties.SetVector(ShaderIDs.ColorBalance, colorBalance);
                lutSheet.properties.SetVector(ShaderIDs.ColorFilter, settings.colorFilter.value);

                float hue = settings.hueShift.value / 360f;         // Remap to [-0.5;0.5]
                float sat = settings.saturation.value / 100f + 1f;  // Remap to [0;2]
                float con = settings.contrast.value / 100f + 1f;    // Remap to [0;2]
                lutSheet.properties.SetVector(ShaderIDs.HueSatCon, new Vector3(hue, sat, con));

                var channelMixerR = new Vector3(settings.mixerRedOutRedIn, settings.mixerRedOutGreenIn, settings.mixerRedOutBlueIn);
                var channelMixerG = new Vector3(settings.mixerGreenOutRedIn, settings.mixerGreenOutGreenIn, settings.mixerGreenOutBlueIn);
                var channelMixerB = new Vector3(settings.mixerBlueOutRedIn, settings.mixerBlueOutGreenIn, settings.mixerBlueOutBlueIn);
                lutSheet.properties.SetVector(ShaderIDs.ChannelMixerRed, channelMixerR / 100f); // Remap to [-2;2]
                lutSheet.properties.SetVector(ShaderIDs.ChannelMixerGreen, channelMixerG / 100f);
                lutSheet.properties.SetVector(ShaderIDs.ChannelMixerBlue, channelMixerB / 100f);

                // LDR path: raw lift/gain/gamma, no HDR scaling
                var lift = ColorUtilities.ColorToLift(settings.lift.value);
                var gain = ColorUtilities.ColorToGain(settings.gain.value);
                var invgamma = ColorUtilities.ColorToInverseGamma(settings.gamma.value);
                lutSheet.properties.SetVector(ShaderIDs.Lift, lift);
                lutSheet.properties.SetVector(ShaderIDs.InvGamma, invgamma);
                lutSheet.properties.SetVector(ShaderIDs.Gain, gain);

                lutSheet.properties.SetFloat(ShaderIDs.Brightness, (settings.brightness.value + 100f) / 100f);
                lutSheet.properties.SetTexture(ShaderIDs.Curves, GetCurveTexture(false));

                // Generate the lut; bake from scratch or on top of a user-provided LDR lut
                context.command.BeginSample("LdrColorGradingLut2D");
                var userLut = settings.ldrLut.value;
                if (userLut == null)
                    context.command.BlitFullscreenTriangle(BuiltinRenderTextureType.None, m_InternalLdrLut, lutSheet, (int)Pass.LutGenLDRFromScratch);
                else
                    context.command.BlitFullscreenTriangle(userLut, m_InternalLdrLut, lutSheet, (int)Pass.LutGenLDR);
                context.command.EndSample("LdrColorGradingLut2D");
            }

            var lut = m_InternalLdrLut;
            var uberSheet = context.uberSheet;
            uberSheet.EnableKeyword("COLOR_GRADING_LDR_2D");
            uberSheet.properties.SetVector(ShaderIDs.Lut2D_Params, new Vector3(1f / lut.width, 1f / lut.height, lut.height - 1f));
            uberSheet.properties.SetTexture(ShaderIDs.Lut2D, lut);
        }

        void CheckInternalLogLut()
        {
            // Check internal lut state, (re)create it if needed
            if (m_InternalLogLut == null || !m_InternalLogLut.IsCreated())
            {
                RuntimeUtilities.Destroy(m_InternalLogLut);

                var format = GetLutFormat();
                m_InternalLogLut = new RenderTexture(k_Lut3DSize, k_Lut3DSize, 0, format, RenderTextureReadWrite.Linear)
                {
                    name = "Color Grading Log Lut",
                    hideFlags = HideFlags.DontSave,
                    filterMode = FilterMode.Bilinear,
                    wrapMode = TextureWrapMode.Clamp,
                    anisoLevel = 0,
                    enableRandomWrite = true, // written by the compute baker
                    volumeDepth = k_Lut3DSize,
                    dimension = TextureDimension.Tex3D,
                    autoGenerateMips = false,
                    useMipMap = false
                };
                m_InternalLogLut.Create();
            }
        }

        void CheckInternalStripLut()
        {
            // Check internal lut state, (re)create it if needed
            if (m_InternalLdrLut == null || !m_InternalLdrLut.IsCreated())
            {
                RuntimeUtilities.Destroy(m_InternalLdrLut);

                var format = GetLutFormat();
                m_InternalLdrLut = new RenderTexture(k_Lut2DSize * k_Lut2DSize, k_Lut2DSize, 0, format, RenderTextureReadWrite.Linear)
                {
                    name = "Color Grading Strip Lut",
                    hideFlags = HideFlags.DontSave,
                    filterMode = FilterMode.Bilinear,
                    wrapMode = TextureWrapMode.Clamp,
                    anisoLevel = 0,
                    autoGenerateMips = false,
                    useMipMap = false
                };
                m_InternalLdrLut.Create();
            }
        }

        // Bakes the secondary (Hue/Sat/Lum "Vs" curves) and, for LDR, the YRGB curves into a
        // 2-row texture. Row 0: Vs curves; row 1: YRGB (only refreshed when hdr == false).
        Texture2D GetCurveTexture(bool hdr)
        {
            if (m_GradingCurves == null)
            {
                var format = GetCurveFormat();
                m_GradingCurves = new Texture2D(Spline.k_Precision, 2, format, false, true)
                {
                    name = "Internal Curves Texture",
                    hideFlags = HideFlags.DontSave,
                    anisoLevel = 0,
                    wrapMode = TextureWrapMode.Clamp,
                    filterMode = FilterMode.Bilinear
                };
            }

            var hueVsHueCurve = settings.hueVsHueCurve.value;
            var hueVsSatCurve = settings.hueVsSatCurve.value;
            var satVsSatCurve = settings.satVsSatCurve.value;
            var lumVsSatCurve = settings.lumVsSatCurve.value;
            var masterCurve = settings.masterCurve.value;
            var redCurve = settings.redCurve.value;
            var greenCurve = settings.greenCurve.value;
            var blueCurve = settings.blueCurve.value;

            var pixels = m_Pixels;

            for (int i = 0; i < Spline.k_Precision; i++)
            {
                // Secondary/VS curves
                float x = hueVsHueCurve.cachedData[i];
                float y = hueVsSatCurve.cachedData[i];
                float z = satVsSatCurve.cachedData[i];
                float w = lumVsSatCurve.cachedData[i];
                pixels[i] = new Color(x, y, z, w);

                // YRGB
                if (!hdr)
                {
                    float m = masterCurve.cachedData[i];
                    float r = redCurve.cachedData[i];
                    float g = greenCurve.cachedData[i];
                    float b = blueCurve.cachedData[i];
                    pixels[i + Spline.k_Precision] = new Color(r, g, b, m);
                }
            }

            m_GradingCurves.SetPixels(pixels);
            m_GradingCurves.Apply(false, false);

            return m_GradingCurves;
        }

        static RenderTextureFormat GetLutFormat()
        {
            // Use ARGBHalf if possible, fallback on ARGB2101010 and ARGB32 otherwise
            var format = RenderTextureFormat.ARGBHalf;

            if (!SystemInfo.SupportsRenderTextureFormat(format))
            {
                format = RenderTextureFormat.ARGB2101010;

                // Note that using a log lut in ARGB32 is a *very* bad idea but we need it for
                // compatibility reasons (else if a platform doesn't support one of the previous
                // format it'll output a black screen, or worse will segfault on the user).
                if (!SystemInfo.SupportsRenderTextureFormat(format))
                    format = RenderTextureFormat.ARGB32;
            }

            return format;
        }

        static TextureFormat GetCurveFormat()
        {
            // Use RGBAHalf if possible, fallback on ARGB32 otherwise
            var format = TextureFormat.RGBAHalf;

            if (!SystemInfo.SupportsTextureFormat(format))
                format = TextureFormat.ARGB32;

            return format;
        }

        public override void Release()
        {
            RuntimeUtilities.Destroy(m_InternalLdrLut);
            m_InternalLdrLut = null;

            RuntimeUtilities.Destroy(m_InternalLogLut);
            m_InternalLogLut = null;

            RuntimeUtilities.Destroy(m_GradingCurves);
            m_GradingCurves = null;
        }
    }
}
584
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    // Bokeh kernel size presets; the enum ordinal is added to the shader pass index and
    // also feeds the max-CoC estimate in CalculateMaxCoCRadius.
    public enum KernelSize
    {
        Small,
        Medium,
        Large,
        VeryLarge
    }

    [Serializable]
    public sealed class KernelSizeParameter : ParameterOverride<KernelSize> {}

    // Settings for the depth-of-field effect (thin-lens style focus parameters).
    [Serializable]
    [PostProcess(typeof(DepthOfFieldRenderer), "Unity/Depth of Field", false)]
    public sealed class DepthOfField : PostProcessEffectSettings
    {
        [Min(0.1f), Tooltip("Distance to the point of focus.")]
        public FloatParameter focusDistance = new FloatParameter { value = 10f };

        [Range(0.05f, 32f), Tooltip("Ratio of aperture (known as f-stop or f-number). The smaller the value is, the shallower the depth of field is.")]
        public FloatParameter aperture = new FloatParameter { value = 5.6f };

        [Range(1f, 300f), Tooltip("Distance between the lens and the film. The larger the value is, the shallower the depth of field is.")]
        public FloatParameter focalLength = new FloatParameter { value = 50f };

        [DisplayName("Max Blur Size"), Tooltip("Convolution kernel size of the bokeh filter, which determines the maximum radius of bokeh. It also affects performances (the larger the kernel is, the longer the GPU time is required).")]
        public KernelSizeParameter kernelSize = new KernelSizeParameter { value = KernelSize.Medium };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            // Requires shader model 3.5 equivalent
            return enabled.value
                && SystemInfo.graphicsShaderLevel >= 35;
        }
    }

    // TODO: Look into minimum blur amount in the distance, right now it's lerped until a point
    // TODO: Doesn't play nice with alpha propagation, see if it can be fixed without killing performances
    public sealed class DepthOfFieldRenderer : PostProcessEffectRenderer<DepthOfField>
    {
        enum Pass
        {
            CoCCalculation,
            CoCTemporalFilter,
            DownsampleAndPrefilter,
            BokehSmallKernel,
            BokehMediumKernel,
            BokehLargeKernel,
            BokehVeryLargeKernel,
            PostFilter,
            Combine,
            DebugOverlay
        }

        // Ping-pong between two history textures as we can't read & write the same target in the
        // same pass
        const int k_NumEyes = 2;
        const int k_NumCoCHistoryTextures = 2;
        readonly RenderTexture[][] m_CoCHistoryTextures = new RenderTexture[k_NumEyes][];
        int[] m_HistoryPingPong = new int[k_NumEyes];

        // Height of the 35mm full-frame format (36mm x 24mm)
        // TODO: Should be set by a physical camera
        const float k_FilmHeight = 0.024f;

        public DepthOfFieldRenderer()
        {
            for (int eye = 0; eye < k_NumEyes; eye++)
            {
                m_CoCHistoryTextures[eye] = new RenderTexture[k_NumCoCHistoryTextures];
                m_HistoryPingPong[eye] = 0;
            }
        }

        public override DepthTextureMode GetCameraFlags()
        {
            // CoC is computed from the depth buffer
            return DepthTextureMode.Depth;
        }

        // Returns the first of the two formats that the platform supports, or Default.
        RenderTextureFormat SelectFormat(RenderTextureFormat primary, RenderTextureFormat secondary)
        {
            if (SystemInfo.SupportsRenderTextureFormat(primary))
                return primary;

            if (SystemInfo.SupportsRenderTextureFormat(secondary))
                return secondary;

            return RenderTextureFormat.Default;
        }

        float CalculateMaxCoCRadius(int screenHeight)
        {
            // Estimate the allowable maximum radius of CoC from the kernel
            // size (the equation below was empirically derived).
            float radiusInPixels = (float)settings.kernelSize.value * 4f + 6f;

            // Applying a 5% limit to the CoC radius to keep the size of
            // TileMax/NeighborMax small enough.
            return Mathf.Min(0.05f, radiusInPixels / screenHeight);
        }

        // Lazily (re)creates the CoC history texture for the given eye/slot; invalidated on
        // reset, loss, or resolution change.
        RenderTexture CheckHistory(int eye, int id, PostProcessRenderContext context, RenderTextureFormat format)
        {
            var rt = m_CoCHistoryTextures[eye][id];

            if (m_ResetHistory || rt == null || !rt.IsCreated() || rt.width != context.width || rt.height != context.height)
            {
                RenderTexture.ReleaseTemporary(rt);

                // TODO: The CoCCalculation CoCTex uses RenderTextureReadWrite.Linear, why isn't this?
                rt = context.GetScreenSpaceTemporaryRT(0, format);
                rt.name = "CoC History, Eye: " + eye + ", ID: " + id;
                rt.filterMode = FilterMode.Bilinear;
                rt.Create();
                m_CoCHistoryTextures[eye][id] = rt;
            }

            return rt;
        }

        public override void Render(PostProcessRenderContext context)
        {
            var colorFormat = RenderTextureFormat.DefaultHDR;
            var cocFormat = SelectFormat(RenderTextureFormat.R8, RenderTextureFormat.RHalf);

            // Avoid using R8 on OSX with Metal. #896121, https://goo.gl/MgKqu6
#if (UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX) && !UNITY_2017_1_OR_NEWER
            if (SystemInfo.graphicsDeviceType == UnityEngine.Rendering.GraphicsDeviceType.Metal)
                cocFormat = SelectFormat(RenderTextureFormat.RHalf, RenderTextureFormat.Default);
#endif

            // Material setup
            // coeff = f^2 / (N * (s1 - f) * filmHeight * 2) — scales depth into CoC in the shader
            var f = settings.focalLength.value / 1000f;
            var s1 = Mathf.Max(settings.focusDistance.value, f);
            var aspect = (float)context.screenWidth / (float)context.screenHeight;
            var coeff = f * f / (settings.aperture.value * (s1 - f) * k_FilmHeight * 2);
            var maxCoC = CalculateMaxCoCRadius(context.screenHeight);

            var sheet = context.propertySheets.Get(context.resources.shaders.depthOfField);
            sheet.properties.Clear();
            sheet.properties.SetFloat(ShaderIDs.Distance, s1);
            sheet.properties.SetFloat(ShaderIDs.LensCoeff, coeff);
            sheet.properties.SetFloat(ShaderIDs.MaxCoC, maxCoC);
            sheet.properties.SetFloat(ShaderIDs.RcpMaxCoC, 1f / maxCoC);
            sheet.properties.SetFloat(ShaderIDs.RcpAspect, 1f / aspect);

            var cmd = context.command;
            cmd.BeginSample("DepthOfField");

            // CoC calculation pass
            context.GetScreenSpaceTemporaryRT(cmd, ShaderIDs.CoCTex, 0, cocFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, ShaderIDs.CoCTex, sheet, (int)Pass.CoCCalculation);

            // CoC temporal filter pass when TAA is enabled
            if (context.IsTemporalAntialiasingActive())
            {
                float motionBlending = context.temporalAntialiasing.motionBlending;
                float blend = m_ResetHistory ? 0f : motionBlending; // Handles first frame blending
                var jitter = context.temporalAntialiasing.jitter;

                sheet.properties.SetVector(ShaderIDs.TaaParams, new Vector3(jitter.x, jitter.y, blend));

                // Three increments: read slot, write slot, then store so next frame reads
                // the slot written this frame.
                int pp = m_HistoryPingPong[context.xrActiveEye];
                var historyRead = CheckHistory(context.xrActiveEye, ++pp % 2, context, cocFormat);
                var historyWrite = CheckHistory(context.xrActiveEye, ++pp % 2, context, cocFormat);
                m_HistoryPingPong[context.xrActiveEye] = ++pp % 2;

                cmd.BlitFullscreenTriangle(historyRead, historyWrite, sheet, (int)Pass.CoCTemporalFilter);
                cmd.ReleaseTemporaryRT(ShaderIDs.CoCTex);
                cmd.SetGlobalTexture(ShaderIDs.CoCTex, historyWrite);
            }

            // Downsampling and prefiltering pass
            context.GetScreenSpaceTemporaryRT(cmd, ShaderIDs.DepthOfFieldTex, 0, colorFormat, RenderTextureReadWrite.Default, FilterMode.Bilinear, context.width / 2, context.height / 2);
            cmd.BlitFullscreenTriangle(context.source, ShaderIDs.DepthOfFieldTex, sheet, (int)Pass.DownsampleAndPrefilter);

            // Bokeh simulation pass
            context.GetScreenSpaceTemporaryRT(cmd, ShaderIDs.DepthOfFieldTemp, 0, colorFormat, RenderTextureReadWrite.Default, FilterMode.Bilinear, context.width / 2, context.height / 2);
            cmd.BlitFullscreenTriangle(ShaderIDs.DepthOfFieldTex, ShaderIDs.DepthOfFieldTemp, sheet, (int)Pass.BokehSmallKernel + (int)settings.kernelSize.value);

            // Postfilter pass
            cmd.BlitFullscreenTriangle(ShaderIDs.DepthOfFieldTemp, ShaderIDs.DepthOfFieldTex, sheet, (int)Pass.PostFilter);
            cmd.ReleaseTemporaryRT(ShaderIDs.DepthOfFieldTemp);

            // Debug overlay pass
            if (context.IsDebugOverlayEnabled(DebugOverlay.DepthOfField))
                context.PushDebugOverlay(cmd, context.source, sheet, (int)Pass.DebugOverlay);

            // Combine pass
            cmd.BlitFullscreenTriangle(context.source, context.destination, sheet, (int)Pass.Combine);
            cmd.ReleaseTemporaryRT(ShaderIDs.DepthOfFieldTex);

            if (!context.IsTemporalAntialiasingActive())
                cmd.ReleaseTemporaryRT(ShaderIDs.CoCTex);

            cmd.EndSample("DepthOfField");

            m_ResetHistory = false;
        }

        public override void Release()
        {
            for (int eye = 0; eye < k_NumEyes; eye++)
            {
                for (int i = 0; i < m_CoCHistoryTextures[eye].Length; i++)
                {
                    RenderTexture.ReleaseTemporary(m_CoCHistoryTextures[eye][i]);
                    m_CoCHistoryTextures[eye][i] = null;
                }

                m_HistoryPingPong[eye] = 0;
            }

            ResetHistory();
        }
    }
}
219
orrb
openai
C#
using System;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Feeds a blue-noise texture and per-frame offsets to the uber shader's dithering pass.
    /// </summary>
    [Serializable]
    public sealed class Dithering
    {
        // Index of the blue-noise texture in use this frame; advanced every Render call.
        int m_NoiseTextureIndex = 0;

        internal void Render(PostProcessRenderContext context)
        {
            var noiseTextures = context.resources.blueNoise64;
            Assert.IsTrue(noiseTextures != null && noiseTextures.Length > 0);

#if POSTFX_DEBUG_STATIC_DITHERING
            // Used by QA for automated testing: fixed texture, no jitter
            m_NoiseTextureIndex = 0;
            float offsetX = 0f;
            float offsetY = 0f;
#else
            // Cycle through the noise textures and pick a random UV offset each frame
            if (++m_NoiseTextureIndex >= noiseTextures.Length)
                m_NoiseTextureIndex = 0;

            float offsetX = Random.value;
            float offsetY = Random.value;
#endif

            var noiseTexture = noiseTextures[m_NoiseTextureIndex];
            var sheet = context.uberSheet;

            // xy: screen-to-noise scale so one noise texel covers one pixel, zw: jitter
            var coords = new Vector4(
                (float)context.screenWidth / (float)noiseTexture.width,
                (float)context.screenHeight / (float)noiseTexture.height,
                offsetX,
                offsetY
            );

            sheet.properties.SetTexture(ShaderIDs.DitheringTex, noiseTexture);
            sheet.properties.SetVector(ShaderIDs.Dithering_Coords, coords);
        }
    }
}
41
orrb
openai
C#
using System;
using UnityEngine.Serialization;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Settings for the FXAA (Fast Approximate Anti-aliasing) pass.
    /// Plain serializable data holder; the options are read by the rendering code elsewhere.
    /// </summary>
    [Serializable]
    public sealed class FastApproximateAntialiasing
    {
        // Serialized name kept backward-compatible with the old "mobileOptimized" field
        [FormerlySerializedAs("mobileOptimized")]
        [Tooltip("Boost performances by lowering the effect quality. This settings is meant to be used on mobile and other low-end platforms but can also provide a nice performance boost on desktops and consoles.")]
        public bool fastMode = false;

        [Tooltip("Keep alpha channel. This will slightly lower the effect quality but allows rendering against a transparent background.")]
        public bool keepAlpha = false;
    }
}
17
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Internal fog pass for the deferred rendering path, driven by the scene's
    /// RenderSettings fog values.
    /// </summary>
    [Serializable]
    public sealed class Fog
    {
        [Tooltip("Enables the internal deferred fog pass. Actual fog settings should be set in the Lighting panel.")]
        public bool enabled = true;

        [Tooltip("Should the fog affect the skybox?")]
        public bool excludeSkybox = true;

        internal DepthTextureMode GetCameraFlags()
        {
            // The pass reconstructs fog from the depth texture
            return DepthTextureMode.Depth;
        }

        internal bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            // In forward fog is already done at shader level, so this pass only runs for
            // the built-in deferred path with scene fog turned on.
            if (!enabled || !RenderSettings.fog)
                return false;

            if (RuntimeUtilities.scriptableRenderPipelineActive)
                return false;

            return context.camera.actualRenderingPath == RenderingPath.DeferredShading;
        }

        internal void Render(PostProcessRenderContext context)
        {
            var sheet = context.propertySheets.Get(context.resources.shaders.deferredFog);
            sheet.ClearKeywords();

            // Convert the fog color when rendering in linear color space
            var fogColor = RuntimeUtilities.isLinearColorSpace
                ? RenderSettings.fogColor.linear
                : RenderSettings.fogColor;

            var fogParams = new Vector3(RenderSettings.fogDensity, RenderSettings.fogStartDistance, RenderSettings.fogEndDistance);

            sheet.properties.SetVector(ShaderIDs.FogColor, fogColor);
            sheet.properties.SetVector(ShaderIDs.FogParams, fogParams);

            // Pass 1 leaves the skybox untouched, pass 0 fogs everything
            int passIndex = excludeSkybox ? 1 : 0;
            context.command.BlitFullscreenTriangle(context.source, context.destination, sheet, passIndex);
        }
    }
}
41
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    // Film grain settings.
    [Serializable]
    [PostProcess(typeof(GrainRenderer), "Unity/Grain")]
    public sealed class Grain : PostProcessEffectSettings
    {
        [Tooltip("Enable the use of colored grain.")]
        public BoolParameter colored = new BoolParameter { value = true };

        [Range(0f, 1f), Tooltip("Grain strength. Higher means more visible grain.")]
        public FloatParameter intensity = new FloatParameter { value = 0f };

        [Range(0.3f, 3f), Tooltip("Grain particle size.")]
        public FloatParameter size = new FloatParameter { value = 1f };

        [Range(0f, 1f), DisplayName("Luminance Contribution"), Tooltip("Controls the noisiness response curve based on scene luminance. Lower values mean less noise in dark areas.")]
        public FloatParameter lumContrib = new FloatParameter { value = 0.8f };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            // Skip the effect entirely at zero intensity
            return enabled.value
                && intensity.value > 0f;
        }
    }

    // Bakes a 128x128 grain lookup each frame, then feeds it (plus per-frame Halton
    // jitter offsets) to the uber shader.
    public sealed class GrainRenderer : PostProcessEffectRenderer<Grain>
    {
        RenderTexture m_GrainLookupRT;

        const int k_SampleCount = 1024;
        int m_SampleIndex; // position in the Halton sequence, wraps at k_SampleCount

        public override void Render(PostProcessRenderContext context)
        {
#if POSTFX_DEBUG_STATIC_GRAIN
            // Chosen by a fair dice roll
            float time = 4f;
            float rndOffsetX = 0f;
            float rndOffsetY = 0f;
#else
            float time = Time.realtimeSinceStartup;
            float rndOffsetX = HaltonSeq.Get(m_SampleIndex & 1023, 2);
            float rndOffsetY = HaltonSeq.Get(m_SampleIndex & 1023, 3);

            if (++m_SampleIndex >= k_SampleCount)
                m_SampleIndex = 0;
#endif

            // Generate the grain lut for the current frame first
            if (m_GrainLookupRT == null || !m_GrainLookupRT.IsCreated())
            {
                RuntimeUtilities.Destroy(m_GrainLookupRT);

                m_GrainLookupRT = new RenderTexture(128, 128, 0, GetLookupFormat())
                {
                    filterMode = FilterMode.Bilinear,
                    wrapMode = TextureWrapMode.Repeat, // lookup is tiled across the screen
                    anisoLevel = 0,
                    name = "Grain Lookup Texture"
                };

                m_GrainLookupRT.Create();
            }

            var sheet = context.propertySheets.Get(context.resources.shaders.grainBaker);
            sheet.properties.Clear();
            sheet.properties.SetFloat(ShaderIDs.Phase, time % 10f);

            context.command.BeginSample("GrainLookup");
            context.command.BlitFullscreenTriangle(BuiltinRenderTextureType.None, m_GrainLookupRT, sheet, settings.colored.value ? 1 : 0);
            context.command.EndSample("GrainLookup");

            // Send everything to the uber shader
            var uberSheet = context.uberSheet;
            uberSheet.EnableKeyword("GRAIN");
            uberSheet.properties.SetTexture(ShaderIDs.GrainTex, m_GrainLookupRT);
            uberSheet.properties.SetVector(ShaderIDs.Grain_Params1, new Vector2(settings.lumContrib.value, settings.intensity.value * 20f));
            // xy: screen-to-lookup scale (divided by grain size), zw: per-frame jitter
            uberSheet.properties.SetVector(ShaderIDs.Grain_Params2, new Vector4((float)context.width / (float)m_GrainLookupRT.width / settings.size.value, (float)context.height / (float)m_GrainLookupRT.height / settings.size.value, rndOffsetX, rndOffsetY));
        }

        RenderTextureFormat GetLookupFormat()
        {
            // Prefer ARGBHalf, fall back to ARGB32
            if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.ARGBHalf))
                return RenderTextureFormat.ARGBHalf;

            return RenderTextureFormat.ARGB32;
        }

        public override void Release()
        {
            RuntimeUtilities.Destroy(m_GrainLookupRT);
            m_GrainLookupRT = null;
            m_SampleIndex = 0;
        }
    }
}
99
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    // Camera motion blur. Blur strength is driven by a virtual rotary shutter
    // angle; quality is driven by the reconstruction sample count.
    [Serializable]
    [PostProcess(typeof(MotionBlurRenderer), "Unity/Motion Blur", false)]
    public sealed class MotionBlur : PostProcessEffectSettings
    {
        [Range(0f, 360f), Tooltip("The angle of rotary shutter. Larger values give longer exposure.")]
        public FloatParameter shutterAngle = new FloatParameter { value = 270f };

        [Range(4, 32), Tooltip("The amount of sample points, which affects quality and performances.")]
        public IntParameter sampleCount = new IntParameter { value = 10 };

        // Active only when a visible result would be produced and the platform
        // supports motion vectors and the RGHalf render texture format.
        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            return enabled.value
                && shutterAngle.value > 0f
            #if UNITY_EDITOR
                // Don't render the motion blur preview when the editor is not playing
                // as it can in some cases result in ugly artifacts (i.e. when resizing
                // the game view).
                && Application.isPlaying
            #endif
                && SystemInfo.supportsMotionVectors
                && SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RGHalf)
                && !RuntimeUtilities.isVREnabled;
        }
    }

    // Reconstruction-filter motion blur renderer (velocity packing, TileMax
    // pyramid, NeighborMax, then the final reconstruction pass).
    public sealed class MotionBlurRenderer : PostProcessEffectRenderer<MotionBlur>
    {
        // Shader pass indices; must match the pass order in the motion blur shader.
        enum Pass
        {
            VelocitySetup,
            TileMax1,
            TileMax2,
            TileMaxV,
            NeighborMax,
            Reconstruction
        }

        public override DepthTextureMode GetCameraFlags()
        {
            return DepthTextureMode.Depth | DepthTextureMode.MotionVectors;
        }

        public override void Render(PostProcessRenderContext context)
        {
            var cmd = context.command;

            // Right after a history reset the motion data isn't usable yet, so
            // just pass the image through for this frame.
            if (m_ResetHistory)
            {
                cmd.BlitFullscreenTriangle(context.source, context.destination);
                m_ResetHistory = false;
                return;
            }

            // Maximum blur radius, expressed in % of the screen height.
            const float kMaxBlurRadius = 5f;

            // Texture format for storing 2D velocity vectors.
            var vectorRTFormat = RenderTextureFormat.RGHalf;

            // Texture format for the packed velocity/depth buffer; fall back to
            // ARGB32 when 10-bit isn't supported.
            var packedRTFormat = SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.ARGB2101010)
                ? RenderTextureFormat.ARGB2101010
                : RenderTextureFormat.ARGB32;

            var sheet = context.propertySheets.Get(context.resources.shaders.motionBlur);
            cmd.BeginSample("MotionBlur");

            // Calculate the maximum blur radius in pixels.
            int maxBlurPixels = (int)(kMaxBlurRadius * context.height / 100);

            // Calculate the TileMax size.
            // It should be a multiple of 8 and larger than maxBlur.
            int tileSize = ((maxBlurPixels - 1) / 8 + 1) * 8;

            // Pass 1 - Velocity/depth packing
            var velocityScale = settings.shutterAngle / 360f;
            sheet.properties.SetFloat(ShaderIDs.VelocityScale, velocityScale);
            sheet.properties.SetFloat(ShaderIDs.MaxBlurRadius, maxBlurPixels);
            sheet.properties.SetFloat(ShaderIDs.RcpMaxBlurRadius, 1f / maxBlurPixels);

            int vbuffer = ShaderIDs.VelocityTex;
            cmd.GetTemporaryRT(vbuffer, context.width, context.height, 0, FilterMode.Point,
                packedRTFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, vbuffer, sheet, (int)Pass.VelocitySetup);

            // Pass 2 - First TileMax filter (1/2 downsize)
            int tile2 = ShaderIDs.Tile2RT;
            cmd.GetTemporaryRT(tile2, context.width / 2, context.height / 2, 0, FilterMode.Point,
                vectorRTFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(vbuffer, tile2, sheet, (int)Pass.TileMax1);

            // Pass 3 - Second TileMax filter (1/2 downsize)
            int tile4 = ShaderIDs.Tile4RT;
            cmd.GetTemporaryRT(tile4, context.width / 4, context.height / 4, 0, FilterMode.Point,
                vectorRTFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(tile2, tile4, sheet, (int)Pass.TileMax2);
            cmd.ReleaseTemporaryRT(tile2);

            // Pass 4 - Third TileMax filter (1/2 downsize)
            int tile8 = ShaderIDs.Tile8RT;
            cmd.GetTemporaryRT(tile8, context.width / 8, context.height / 8, 0, FilterMode.Point,
                vectorRTFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(tile4, tile8, sheet, (int)Pass.TileMax2);
            cmd.ReleaseTemporaryRT(tile4);

            // Pass 5 - Fourth TileMax filter (reduce to tileSize)
            var tileMaxOffs = Vector2.one * (tileSize / 8f - 1f) * -0.5f;
            sheet.properties.SetVector(ShaderIDs.TileMaxOffs, tileMaxOffs);
            sheet.properties.SetFloat(ShaderIDs.TileMaxLoop, (int)(tileSize / 8f));

            int tile = ShaderIDs.TileVRT;
            cmd.GetTemporaryRT(tile, context.width / tileSize, context.height / tileSize, 0,
                FilterMode.Point, vectorRTFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(tile8, tile, sheet, (int)Pass.TileMaxV);
            cmd.ReleaseTemporaryRT(tile8);

            // Pass 6 - NeighborMax filter
            int neighborMax = ShaderIDs.NeighborMaxTex;
            int neighborMaxWidth = context.width / tileSize;
            int neighborMaxHeight = context.height / tileSize;
            cmd.GetTemporaryRT(neighborMax, neighborMaxWidth, neighborMaxHeight, 0,
                FilterMode.Point, vectorRTFormat, RenderTextureReadWrite.Linear);
            cmd.BlitFullscreenTriangle(tile, neighborMax, sheet, (int)Pass.NeighborMax);
            cmd.ReleaseTemporaryRT(tile);

            // Pass 7 - Reconstruction pass
            sheet.properties.SetFloat(ShaderIDs.LoopCount, Mathf.Clamp(settings.sampleCount / 2, 1, 64));
            cmd.BlitFullscreenTriangle(context.source, context.destination, sheet, (int)Pass.Reconstruction);

            cmd.ReleaseTemporaryRT(vbuffer);
            cmd.ReleaseTemporaryRT(neighborMax);

            cmd.EndSample("MotionBlur");
        }
    }
}
135
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    // Multi-scale volumetric obscurance
    // TODO: Fix VR support
#if UNITY_2017_1_OR_NEWER
    [Serializable]
    public sealed class MultiScaleVO : IAmbientOcclusionMethod
    {
        // Mip chain indices for the working buffers (Original = full resolution).
        internal enum MipLevel { Original, L1, L2, L3, L4, L5, L6 }

        // Shader pass indices; must match the pass order in the MSVO shader.
        enum Pass
        {
            DepthCopy,
            CompositionDeferred,
            CompositionForward,
            DebugOverlay
        }

        // The arrays below are reused between frames to reduce GC allocation.

        // Sphere chord lengths at each of the 12 sample offsets; consumed by
        // PushRenderCommands to build the thickness/weight tables.
        readonly float[] m_SampleThickness =
        {
            Mathf.Sqrt(1f - 0.2f * 0.2f),
            Mathf.Sqrt(1f - 0.4f * 0.4f),
            Mathf.Sqrt(1f - 0.6f * 0.6f),
            Mathf.Sqrt(1f - 0.8f * 0.8f),
            Mathf.Sqrt(1f - 0.2f * 0.2f - 0.2f * 0.2f),
            Mathf.Sqrt(1f - 0.2f * 0.2f - 0.4f * 0.4f),
            Mathf.Sqrt(1f - 0.2f * 0.2f - 0.6f * 0.6f),
            Mathf.Sqrt(1f - 0.2f * 0.2f - 0.8f * 0.8f),
            Mathf.Sqrt(1f - 0.4f * 0.4f - 0.4f * 0.4f),
            Mathf.Sqrt(1f - 0.4f * 0.4f - 0.6f * 0.6f),
            Mathf.Sqrt(1f - 0.4f * 0.4f - 0.8f * 0.8f),
            Mathf.Sqrt(1f - 0.6f * 0.6f - 0.6f * 0.6f)
        };

        readonly float[] m_InvThicknessTable = new float[12];
        readonly float[] m_SampleWeightTable = new float[12];

        // Per-mip buffer sizes, recomputed every frame in GenerateAOMap.
        readonly int[] m_Widths = new int[7];
        readonly int[] m_Heights = new int[7];

        AmbientOcclusion m_Settings;
        PropertySheet m_PropertySheet;
        PostProcessResources m_Resources;

        // Can't use a temporary because we need to share it between cmdbuffers - also fixes a weird
        // command buffer warning
        RenderTexture m_AmbientOnlyAO;

        readonly RenderTargetIdentifier[] m_MRT =
        {
            BuiltinRenderTextureType.GBuffer0,    // Albedo, Occ
            BuiltinRenderTextureType.CameraTarget // Ambient
        };

        public MultiScaleVO(AmbientOcclusion settings)
        {
            m_Settings = settings;
        }

        public DepthTextureMode GetCameraFlags()
        {
            return DepthTextureMode.Depth;
        }

        // Special case for AO [because SRPs], please don't do this in other effects, it's bad
        // practice in this framework
        public void SetResources(PostProcessResources resources)
        {
            m_Resources = resources;
        }

        // Allocates a temporary 2D render target sized for the given mip level.
        void Alloc(CommandBuffer cmd, int id, MipLevel size, RenderTextureFormat format, bool uav)
        {
            int sizeId = (int)size;
            cmd.GetTemporaryRT(id, new RenderTextureDescriptor
            {
                width = m_Widths[sizeId],
                height = m_Heights[sizeId],
                colorFormat = format,
                depthBufferBits = 0,
                autoGenerateMips = false,
                msaaSamples = 1,
                enableRandomWrite = uav,
                dimension = TextureDimension.Tex2D,
                sRGB = false
            }, FilterMode.Point);
        }

        // Allocates a temporary 16-slice texture array sized for the given mip level.
        void AllocArray(CommandBuffer cmd, int id, MipLevel size, RenderTextureFormat format, bool uav)
        {
            int sizeId = (int)size;
            cmd.GetTemporaryRT(id, new RenderTextureDescriptor
            {
                width = m_Widths[sizeId],
                height = m_Heights[sizeId],
                colorFormat = format,
                depthBufferBits = 0,
                volumeDepth = 16,
                autoGenerateMips = false,
                msaaSamples = 1,
                enableRandomWrite = uav,
                dimension = TextureDimension.Tex2DArray,
                sRGB = false
            }, FilterMode.Point);
        }

        void Release(CommandBuffer cmd, int id)
        {
            cmd.ReleaseTemporaryRT(id);
        }

        // Calculate values in _ZBufferParams (built-in shader variable)
        // We can't use _ZBufferParams in compute shaders, so this function is
        // used to give the values in it to compute shaders.
        Vector4 CalculateZBufferParams(Camera camera)
        {
            float fpn = camera.farClipPlane / camera.nearClipPlane;

            if (SystemInfo.usesReversedZBuffer)
                return new Vector4(fpn - 1f, 1f, 0f, 0f);

            return new Vector4(1f - fpn, fpn, 0f, 0f);
        }

        float CalculateTanHalfFovHeight(Camera camera)
        {
            return 1f / camera.projectionMatrix[0, 0];
        }

        Vector2 GetSize(MipLevel mip)
        {
            return new Vector2(m_Widths[(int)mip], m_Heights[(int)mip]);
        }

        Vector3 GetSizeArray(MipLevel mip)
        {
            return new Vector3(m_Widths[(int)mip], m_Heights[(int)mip], 16);
        }

        // Builds the full AO command stream into cmd: buffer allocation, depth
        // downsampling, per-mip AO rendering, bilateral upsampling into
        // destination, and buffer release.
        public void GenerateAOMap(CommandBuffer cmd, Camera camera, RenderTargetIdentifier destination, RenderTargetIdentifier? depthMap, bool invert)
        {
            // Base size
            m_Widths[0] = camera.pixelWidth * (RuntimeUtilities.isSinglePassStereoEnabled ? 2 : 1);
            m_Heights[0] = camera.pixelHeight;

            // L1 -> L6 sizes (rounded-up halvings of the base size)
            for (int i = 1; i < 7; i++)
            {
                int div = 1 << i;
                m_Widths[i] = (m_Widths[0] + (div - 1)) / div;
                m_Heights[i] = (m_Heights[0] + (div - 1)) / div;
            }

            // Allocate temporary textures
            PushAllocCommands(cmd);

            // Render logic
            PushDownsampleCommands(cmd, camera, depthMap);

            float tanHalfFovH = CalculateTanHalfFovHeight(camera);
            PushRenderCommands(cmd, ShaderIDs.TiledDepth1, ShaderIDs.Occlusion1, GetSizeArray(MipLevel.L3), tanHalfFovH);
            PushRenderCommands(cmd, ShaderIDs.TiledDepth2, ShaderIDs.Occlusion2, GetSizeArray(MipLevel.L4), tanHalfFovH);
            PushRenderCommands(cmd, ShaderIDs.TiledDepth3, ShaderIDs.Occlusion3, GetSizeArray(MipLevel.L5), tanHalfFovH);
            PushRenderCommands(cmd, ShaderIDs.TiledDepth4, ShaderIDs.Occlusion4, GetSizeArray(MipLevel.L6), tanHalfFovH);

            // Combine the AO levels from coarsest to finest.
            PushUpsampleCommands(cmd, ShaderIDs.LowDepth4, ShaderIDs.Occlusion4, ShaderIDs.LowDepth3, ShaderIDs.Occlusion3, ShaderIDs.Combined3, GetSize(MipLevel.L4), GetSize(MipLevel.L3));
            PushUpsampleCommands(cmd, ShaderIDs.LowDepth3, ShaderIDs.Combined3, ShaderIDs.LowDepth2, ShaderIDs.Occlusion2, ShaderIDs.Combined2, GetSize(MipLevel.L3), GetSize(MipLevel.L2));
            PushUpsampleCommands(cmd, ShaderIDs.LowDepth2, ShaderIDs.Combined2, ShaderIDs.LowDepth1, ShaderIDs.Occlusion1, ShaderIDs.Combined1, GetSize(MipLevel.L2), GetSize(MipLevel.L1));
            PushUpsampleCommands(cmd, ShaderIDs.LowDepth1, ShaderIDs.Combined1, ShaderIDs.LinearDepth, null, destination, GetSize(MipLevel.L1), GetSize(MipLevel.Original), invert);

            // Cleanup
            PushReleaseCommands(cmd);
        }

        // Allocates every temporary buffer used by GenerateAOMap.
        void PushAllocCommands(CommandBuffer cmd)
        {
            Alloc(cmd, ShaderIDs.LinearDepth, MipLevel.Original, RenderTextureFormat.RHalf, true);

            Alloc(cmd, ShaderIDs.LowDepth1, MipLevel.L1, RenderTextureFormat.RFloat, true);
            Alloc(cmd, ShaderIDs.LowDepth2, MipLevel.L2, RenderTextureFormat.RFloat, true);
            Alloc(cmd, ShaderIDs.LowDepth3, MipLevel.L3, RenderTextureFormat.RFloat, true);
            Alloc(cmd, ShaderIDs.LowDepth4, MipLevel.L4, RenderTextureFormat.RFloat, true);

            AllocArray(cmd, ShaderIDs.TiledDepth1, MipLevel.L3, RenderTextureFormat.RHalf, true);
            AllocArray(cmd, ShaderIDs.TiledDepth2, MipLevel.L4, RenderTextureFormat.RHalf, true);
            AllocArray(cmd, ShaderIDs.TiledDepth3, MipLevel.L5, RenderTextureFormat.RHalf, true);
            AllocArray(cmd, ShaderIDs.TiledDepth4, MipLevel.L6, RenderTextureFormat.RHalf, true);

            Alloc(cmd, ShaderIDs.Occlusion1, MipLevel.L1, RenderTextureFormat.R8, true);
            Alloc(cmd, ShaderIDs.Occlusion2, MipLevel.L2, RenderTextureFormat.R8, true);
            Alloc(cmd, ShaderIDs.Occlusion3, MipLevel.L3, RenderTextureFormat.R8, true);
            Alloc(cmd, ShaderIDs.Occlusion4, MipLevel.L4, RenderTextureFormat.R8, true);

            Alloc(cmd, ShaderIDs.Combined1, MipLevel.L1, RenderTextureFormat.R8, true);
            Alloc(cmd, ShaderIDs.Combined2, MipLevel.L2, RenderTextureFormat.R8, true);
            Alloc(cmd, ShaderIDs.Combined3, MipLevel.L3, RenderTextureFormat.R8, true);
        }

        // Linearizes and progressively downsamples the depth buffer into the
        // LowDepth/TiledDepth chains with two compute passes.
        void PushDownsampleCommands(CommandBuffer cmd, Camera camera, RenderTargetIdentifier? depthMap)
        {
            RenderTargetIdentifier depthMapId;
            bool needDepthMapRelease = false;

            if (depthMap != null)
            {
                depthMapId = depthMap.Value;
            }
            else
            {
                // Make a copy of the depth texture, or reuse the resolved depth
                // buffer (it's only available in some specific situations).
                if (!RuntimeUtilities.IsResolvedDepthAvailable(camera))
                {
                    Alloc(cmd, ShaderIDs.DepthCopy, MipLevel.Original, RenderTextureFormat.RFloat, false);
                    depthMapId = new RenderTargetIdentifier(ShaderIDs.DepthCopy);
                    cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, depthMapId, m_PropertySheet, (int)Pass.DepthCopy);
                    needDepthMapRelease = true;
                }
                else
                {
                    depthMapId = BuiltinRenderTextureType.ResolvedDepth;
                }
            }

            // 1st downsampling pass.
            var cs = m_Resources.computeShaders.multiScaleAODownsample1;
            int kernel = cs.FindKernel("main");

            cmd.SetComputeTextureParam(cs, kernel, "LinearZ", ShaderIDs.LinearDepth);
            cmd.SetComputeTextureParam(cs, kernel, "DS2x", ShaderIDs.LowDepth1);
            cmd.SetComputeTextureParam(cs, kernel, "DS4x", ShaderIDs.LowDepth2);
            cmd.SetComputeTextureParam(cs, kernel, "DS2xAtlas", ShaderIDs.TiledDepth1);
            cmd.SetComputeTextureParam(cs, kernel, "DS4xAtlas", ShaderIDs.TiledDepth2);
            cmd.SetComputeVectorParam(cs, "ZBufferParams", CalculateZBufferParams(camera));
            cmd.SetComputeTextureParam(cs, kernel, "Depth", depthMapId);

            cmd.DispatchCompute(cs, kernel, m_Widths[(int)MipLevel.L4], m_Heights[(int)MipLevel.L4], 1);

            if (needDepthMapRelease)
                Release(cmd, ShaderIDs.DepthCopy);

            // 2nd downsampling pass.
            cs = m_Resources.computeShaders.multiScaleAODownsample2;
            kernel = cs.FindKernel("main");

            cmd.SetComputeTextureParam(cs, kernel, "DS4x", ShaderIDs.LowDepth2);
            cmd.SetComputeTextureParam(cs, kernel, "DS8x", ShaderIDs.LowDepth3);
            cmd.SetComputeTextureParam(cs, kernel, "DS16x", ShaderIDs.LowDepth4);
            cmd.SetComputeTextureParam(cs, kernel, "DS8xAtlas", ShaderIDs.TiledDepth3);
            cmd.SetComputeTextureParam(cs, kernel, "DS16xAtlas", ShaderIDs.TiledDepth4);

            cmd.DispatchCompute(cs, kernel, m_Widths[(int)MipLevel.L6], m_Heights[(int)MipLevel.L6], 1);
        }

        // Dispatches the AO render kernel for one tiled-depth mip level.
        void PushRenderCommands(CommandBuffer cmd, int source, int destination, Vector3 sourceSize, float tanHalfFovH)
        {
            // Here we compute multipliers that convert the center depth value into (the reciprocal
            // of) sphere thicknesses at each sample location. This assumes a maximum sample radius
            // of 5 units, but since a sphere has no thickness at its extent, we don't need to
            // sample that far out. Only samples whole integer offsets with distance less than 25
            // are used. This means that there is no sample at (3, 4) because its distance is
            // exactly 25 (and has a thickness of 0.)

            // The shaders are set up to sample a circular region within a 5-pixel radius.
            const float kScreenspaceDiameter = 10f;

            // SphereDiameter = CenterDepth * ThicknessMultiplier. This will compute the thickness
            // of a sphere centered at a specific depth. The ellipsoid scale can stretch a sphere
            // into an ellipsoid, which changes the characteristics of the AO.
            // TanHalfFovH: Radius of sphere in depth units if its center lies at Z = 1
            // ScreenspaceDiameter: Diameter of sample sphere in pixel units
            // ScreenspaceDiameter / BufferWidth: Ratio of the screen width that the sphere actually covers
            float thicknessMultiplier = 2f * tanHalfFovH * kScreenspaceDiameter / sourceSize.x;

            if (RuntimeUtilities.isSinglePassStereoEnabled)
                thicknessMultiplier *= 2f;

            // This will transform a depth value from [0, thickness] to [0, 1].
            float inverseRangeFactor = 1f / thicknessMultiplier;

            // The thicknesses are smaller for all off-center samples of the sphere. Compute
            // thicknesses relative to the center sample.
            for (int i = 0; i < 12; i++)
                m_InvThicknessTable[i] = inverseRangeFactor / m_SampleThickness[i];

            // These are the weights that are multiplied against the samples because not all samples
            // are equally important. The farther the sample is from the center location, the less
            // they matter. We use the thickness of the sphere to determine the weight. The scalars
            // in front are the number of samples with this weight because we sum the samples
            // together before multiplying by the weight, so as an aggregate all of those samples
            // matter more. After generating this table, the weights are normalized.
            m_SampleWeightTable[0] = 4 * m_SampleThickness[0];   // Axial
            m_SampleWeightTable[1] = 4 * m_SampleThickness[1];   // Axial
            m_SampleWeightTable[2] = 4 * m_SampleThickness[2];   // Axial
            m_SampleWeightTable[3] = 4 * m_SampleThickness[3];   // Axial
            m_SampleWeightTable[4] = 4 * m_SampleThickness[4];   // Diagonal
            m_SampleWeightTable[5] = 8 * m_SampleThickness[5];   // L-shaped
            m_SampleWeightTable[6] = 8 * m_SampleThickness[6];   // L-shaped
            m_SampleWeightTable[7] = 8 * m_SampleThickness[7];   // L-shaped
            m_SampleWeightTable[8] = 4 * m_SampleThickness[8];   // Diagonal
            m_SampleWeightTable[9] = 8 * m_SampleThickness[9];   // L-shaped
            m_SampleWeightTable[10] = 8 * m_SampleThickness[10]; // L-shaped
            m_SampleWeightTable[11] = 4 * m_SampleThickness[11]; // Diagonal

            // Zero out the unused samples.
            // FIXME: should we support SAMPLE_EXHAUSTIVELY mode?
            m_SampleWeightTable[0] = 0;
            m_SampleWeightTable[2] = 0;
            m_SampleWeightTable[5] = 0;
            m_SampleWeightTable[7] = 0;
            m_SampleWeightTable[9] = 0;

            // Normalize the weights by dividing by the sum of all weights
            var totalWeight = 0f;

            foreach (float w in m_SampleWeightTable)
                totalWeight += w;

            for (int i = 0; i < m_SampleWeightTable.Length; i++)
                m_SampleWeightTable[i] /= totalWeight;

            // Set the arguments for the render kernel.
            var cs = m_Resources.computeShaders.multiScaleAORender;
            int kernel = cs.FindKernel("main_interleaved");

            cmd.SetComputeFloatParams(cs, "gInvThicknessTable", m_InvThicknessTable);
            cmd.SetComputeFloatParams(cs, "gSampleWeightTable", m_SampleWeightTable);
            cmd.SetComputeVectorParam(cs, "gInvSliceDimension", new Vector2(1f / sourceSize.x, 1f / sourceSize.y));
            cmd.SetComputeVectorParam(cs, "AdditionalParams", new Vector2(-1f / m_Settings.thicknessModifier.value, m_Settings.intensity.value));
            cmd.SetComputeTextureParam(cs, kernel, "DepthTex", source);
            cmd.SetComputeTextureParam(cs, kernel, "Occlusion", destination);

            // Calculate the thread group count and add a dispatch command with them.
            uint xsize, ysize, zsize;
            cs.GetKernelThreadGroupSizes(kernel, out xsize, out ysize, out zsize);

            cmd.DispatchCompute(
                cs, kernel,
                ((int)sourceSize.x + (int)xsize - 1) / (int)xsize,
                ((int)sourceSize.y + (int)ysize - 1) / (int)ysize,
                ((int)sourceSize.z + (int)zsize - 1) / (int)zsize
            );
        }

        // Bilaterally upsamples a low-res AO level and blends it with the
        // next-higher-res level (or writes the final result when highResAO is null).
        void PushUpsampleCommands(CommandBuffer cmd, int lowResDepth, int interleavedAO, int highResDepth, int? highResAO, RenderTargetIdentifier dest, Vector3 lowResDepthSize, Vector2 highResDepthSize, bool invert = false)
        {
            var cs = m_Resources.computeShaders.multiScaleAOUpsample;
            int kernel = cs.FindKernel(highResAO == null ? invert ? "main_invert" : "main" : "main_blendout");

            float stepSize = 1920f / lowResDepthSize.x;
            float bTolerance = 1f - Mathf.Pow(10f, m_Settings.blurTolerance.value) * stepSize;
            bTolerance *= bTolerance;
            float uTolerance = Mathf.Pow(10f, m_Settings.upsampleTolerance.value);
            float noiseFilterWeight = 1f / (Mathf.Pow(10f, m_Settings.noiseFilterTolerance.value) + uTolerance);

            cmd.SetComputeVectorParam(cs, "InvLowResolution", new Vector2(1f / lowResDepthSize.x, 1f / lowResDepthSize.y));
            cmd.SetComputeVectorParam(cs, "InvHighResolution", new Vector2(1f / highResDepthSize.x, 1f / highResDepthSize.y));
            cmd.SetComputeVectorParam(cs, "AdditionalParams", new Vector4(noiseFilterWeight, stepSize, bTolerance, uTolerance));

            cmd.SetComputeTextureParam(cs, kernel, "LoResDB", lowResDepth);
            cmd.SetComputeTextureParam(cs, kernel, "HiResDB", highResDepth);
            cmd.SetComputeTextureParam(cs, kernel, "LoResAO1", interleavedAO);

            if (highResAO != null)
                cmd.SetComputeTextureParam(cs, kernel, "HiResAO", highResAO.Value);

            cmd.SetComputeTextureParam(cs, kernel, "AoResult", dest);

            int xcount = ((int)highResDepthSize.x + 17) / 16;
            int ycount = ((int)highResDepthSize.y + 17) / 16;
            cmd.DispatchCompute(cs, kernel, xcount, ycount, 1);
        }

        // Releases every temporary buffer allocated in PushAllocCommands.
        void PushReleaseCommands(CommandBuffer cmd)
        {
            Release(cmd, ShaderIDs.LinearDepth);

            // BUGFIX: this used to release LowDepth1 four times, leaking
            // LowDepth2-4; release each level of the low-res depth chain once.
            Release(cmd, ShaderIDs.LowDepth1);
            Release(cmd, ShaderIDs.LowDepth2);
            Release(cmd, ShaderIDs.LowDepth3);
            Release(cmd, ShaderIDs.LowDepth4);

            Release(cmd, ShaderIDs.TiledDepth1);
            Release(cmd, ShaderIDs.TiledDepth2);
            Release(cmd, ShaderIDs.TiledDepth3);
            Release(cmd, ShaderIDs.TiledDepth4);

            Release(cmd, ShaderIDs.Occlusion1);
            Release(cmd, ShaderIDs.Occlusion2);
            Release(cmd, ShaderIDs.Occlusion3);
            Release(cmd, ShaderIDs.Occlusion4);

            Release(cmd, ShaderIDs.Combined1);
            Release(cmd, ShaderIDs.Combined2);
            Release(cmd, ShaderIDs.Combined3);
        }

        void PreparePropertySheet(PostProcessRenderContext context)
        {
            var sheet = context.propertySheets.Get(m_Resources.shaders.multiScaleAO);
            sheet.ClearKeywords();
            sheet.properties.SetVector(ShaderIDs.AOColor, Color.white - m_Settings.color.value);
            m_PropertySheet = sheet;
        }

        // (Re)allocates the persistent AO texture on first use or screen resize.
        void CheckAOTexture(PostProcessRenderContext context)
        {
            if (m_AmbientOnlyAO == null || !m_AmbientOnlyAO.IsCreated() || m_AmbientOnlyAO.width != context.width || m_AmbientOnlyAO.height != context.height)
            {
                RuntimeUtilities.Destroy(m_AmbientOnlyAO);
                m_AmbientOnlyAO = new RenderTexture(context.width, context.height, 0, RenderTextureFormat.R8, RenderTextureReadWrite.Linear)
                {
                    hideFlags = HideFlags.DontSave,
                    filterMode = FilterMode.Point,
                    enableRandomWrite = true
                };
                m_AmbientOnlyAO.Create();
            }
        }

        void PushDebug(PostProcessRenderContext context)
        {
            if (context.IsDebugOverlayEnabled(DebugOverlay.AmbientOcclusion))
                context.PushDebugOverlay(context.command, m_AmbientOnlyAO, m_PropertySheet, (int)Pass.DebugOverlay);
        }

        public void RenderAfterOpaque(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Ambient Occlusion");
            SetResources(context.resources);
            PreparePropertySheet(context);
            CheckAOTexture(context);

            // In Forward mode, fog is applied at the object level in the geometry pass so we need
            // to apply it to AO as well or it'll be drawn on top of the fog effect.
            if (context.camera.actualRenderingPath == RenderingPath.Forward && RenderSettings.fog)
            {
                m_PropertySheet.EnableKeyword("APPLY_FORWARD_FOG");
                m_PropertySheet.properties.SetVector(
                    ShaderIDs.FogParams,
                    new Vector3(RenderSettings.fogDensity, RenderSettings.fogStartDistance, RenderSettings.fogEndDistance)
                );
            }

            GenerateAOMap(cmd, context.camera, m_AmbientOnlyAO, null, false);
            PushDebug(context);
            cmd.SetGlobalTexture(ShaderIDs.MSVOcclusionTexture, m_AmbientOnlyAO);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, BuiltinRenderTextureType.CameraTarget, m_PropertySheet, (int)Pass.CompositionForward);
            cmd.EndSample("Ambient Occlusion");
        }

        public void RenderAmbientOnly(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Ambient Occlusion Render");
            SetResources(context.resources);
            PreparePropertySheet(context);
            CheckAOTexture(context);
            GenerateAOMap(cmd, context.camera, m_AmbientOnlyAO, null, false);
            PushDebug(context);
            cmd.EndSample("Ambient Occlusion Render");
        }

        public void CompositeAmbientOnly(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Ambient Occlusion Composite");
            cmd.SetGlobalTexture(ShaderIDs.MSVOcclusionTexture, m_AmbientOnlyAO);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, m_MRT, BuiltinRenderTextureType.CameraTarget, m_PropertySheet, (int)Pass.CompositionDeferred);
            cmd.EndSample("Ambient Occlusion Composite");
        }

        public void Release()
        {
            RuntimeUtilities.Destroy(m_AmbientOnlyAO);
            m_AmbientOnlyAO = null;
        }
    }
#else
    // No-op fallback for Unity versions without the required APIs.
    [Serializable]
    public sealed class MultiScaleVO : IAmbientOcclusionMethod
    {
        public MultiScaleVO(AmbientOcclusion settings)
        {
        }

        public void SetResources(PostProcessResources resources)
        {
        }

        public DepthTextureMode GetCameraFlags()
        {
            return DepthTextureMode.None;
        }

        public void GenerateAOMap(CommandBuffer cmd, Camera camera, RenderTargetIdentifier destination, RenderTargetIdentifier? depthMap, bool invert)
        {
        }

        public void RenderAfterOpaque(PostProcessRenderContext context)
        {
        }

        public void RenderAmbientOnly(PostProcessRenderContext context)
        {
        }

        public void CompositeAmbientOnly(PostProcessRenderContext context)
        {
        }

        public void Release()
        {
        }
    }
#endif
}
527
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    // Scalable ambient obscurance
    [Serializable]
    public sealed class ScalableAO : IAmbientOcclusionMethod
    {
        // Final blurred AO buffer; lazily allocated and reused across frames.
        RenderTexture m_Result;
        PropertySheet m_PropertySheet;
        AmbientOcclusion m_Settings;

        readonly RenderTargetIdentifier[] m_MRT =
        {
            BuiltinRenderTextureType.GBuffer0,    // Albedo, Occ
            BuiltinRenderTextureType.CameraTarget // Ambient
        };

        // AO sample counts indexed by AmbientOcclusionQuality (pw in AOParams).
        readonly int[] m_SampleCount = { 4, 6, 10, 8, 12 };

        // Shader pass indices; must match the pass order in the scalable AO shader.
        // The Forward/Deferred pairs are selected by adding occlusionSource to the
        // Forward index (see Render).
        enum Pass
        {
            OcclusionEstimationForward,
            OcclusionEstimationDeferred,
            HorizontalBlurForward,
            HorizontalBlurDeferred,
            VerticalBlur,
            CompositionForward,
            CompositionDeferred,
            DebugOverlay
        }

        public ScalableAO(AmbientOcclusion settings)
        {
            m_Settings = settings;
        }

        public DepthTextureMode GetCameraFlags()
        {
            return DepthTextureMode.Depth | DepthTextureMode.DepthNormals;
        }

        // Fetches the property sheet and (re)allocates m_Result on first use or
        // when the screen size has changed.
        void DoLazyInitialization(PostProcessRenderContext context)
        {
            m_PropertySheet = context.propertySheets.Get(context.resources.shaders.scalableAO);

            bool reset = false;

            if (m_Result == null || !m_Result.IsCreated())
            {
                // Initial allocation
                m_Result = context.GetScreenSpaceTemporaryRT(0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
                m_Result.hideFlags = HideFlags.DontSave;
                m_Result.filterMode = FilterMode.Bilinear;
                reset = true;
            }
            else if (m_Result.width != context.width || m_Result.height != context.height)
            {
                // Release and reallocate
                m_Result.Release();
                m_Result.width = context.width;
                m_Result.height = context.height;
                reset = true;
            }

            if (reset)
                m_Result.Create();
        }

        // Estimates the occlusion term and blurs it (separable blur) into m_Result.
        // occlusionSource offsets the pass index: 0 = Forward variant, 1 = Deferred.
        void Render(PostProcessRenderContext context, CommandBuffer cmd, int occlusionSource)
        {
            DoLazyInitialization(context);
            // Clamp the radius away from zero to avoid degenerate shader input.
            m_Settings.radius.value = Mathf.Max(m_Settings.radius.value, 1e-4f);

            // Material setup
            // Always use a quarter-res AO buffer unless High/Ultra quality is set.
            bool downsampling = (int)m_Settings.quality.value < (int)AmbientOcclusionQuality.High;
            float px = m_Settings.intensity.value;
            float py = m_Settings.radius.value;
            float pz = downsampling ? 0.5f : 1f;
            float pw = m_SampleCount[(int)m_Settings.quality.value];

            var sheet = m_PropertySheet;
            sheet.ClearKeywords();
            sheet.properties.SetVector(ShaderIDs.AOParams, new Vector4(px, py, pz, pw));
            sheet.properties.SetVector(ShaderIDs.AOColor, Color.white - m_Settings.color.value);

            // In forward, fog is applied at the object level in the geometry pass so
            // we need to apply it to AO as well or it'll be drawn on top of the fog
            // effect. Not needed in Deferred.
            if (context.camera.actualRenderingPath == RenderingPath.Forward && RenderSettings.fog)
            {
                sheet.EnableKeyword("APPLY_FORWARD_FOG");
                sheet.properties.SetVector(
                    ShaderIDs.FogParams,
                    new Vector3(RenderSettings.fogDensity, RenderSettings.fogStartDistance, RenderSettings.fogEndDistance)
                );
            }

            // Texture setup
            int ts = downsampling ? 2 : 1;
            const RenderTextureFormat kFormat = RenderTextureFormat.ARGB32;
            const RenderTextureReadWrite kRWMode = RenderTextureReadWrite.Linear;
            const FilterMode kFilter = FilterMode.Bilinear;

            // AO buffer
            var rtMask = ShaderIDs.OcclusionTexture1;
            int scaledWidth = context.width / ts;
            int scaledHeight = context.height / ts;
            context.GetScreenSpaceTemporaryRT(cmd, rtMask, 0, kFormat, kRWMode, kFilter, scaledWidth, scaledHeight);

            // AO estimation
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, rtMask, sheet, (int)Pass.OcclusionEstimationForward + occlusionSource);

            // Blur buffer
            var rtBlur = ShaderIDs.OcclusionTexture2;
            context.GetScreenSpaceTemporaryRT(cmd, rtBlur, 0, kFormat, kRWMode, kFilter);

            // Separable blur (horizontal pass)
            cmd.BlitFullscreenTriangle(rtMask, rtBlur, sheet, (int)Pass.HorizontalBlurForward + occlusionSource);
            cmd.ReleaseTemporaryRT(rtMask);

            // Separable blur (vertical pass)
            cmd.BlitFullscreenTriangle(rtBlur, m_Result, sheet, (int)Pass.VerticalBlur);
            cmd.ReleaseTemporaryRT(rtBlur);

            if (context.IsDebugOverlayEnabled(DebugOverlay.AmbientOcclusion))
                context.PushDebugOverlay(cmd, m_Result, sheet, (int)Pass.DebugOverlay);
        }

        // Forward path: render AO and immediately composite it over the camera target.
        public void RenderAfterOpaque(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Ambient Occlusion");
            Render(context, cmd, 0);
            cmd.SetGlobalTexture(ShaderIDs.SAOcclusionTexture, m_Result);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, BuiltinRenderTextureType.CameraTarget, m_PropertySheet, (int)Pass.CompositionForward);
            cmd.EndSample("Ambient Occlusion");
        }

        // Deferred path, step 1: render AO into m_Result only.
        public void RenderAmbientOnly(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Ambient Occlusion Render");
            Render(context, cmd, 1);
            cmd.EndSample("Ambient Occlusion Render");
        }

        // Deferred path, step 2: composite m_Result into the G-buffer/ambient MRT.
        public void CompositeAmbientOnly(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Ambient Occlusion Composite");
            cmd.SetGlobalTexture(ShaderIDs.SAOcclusionTexture, m_Result);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, m_MRT, BuiltinRenderTextureType.CameraTarget, m_PropertySheet, (int)Pass.CompositionDeferred);
            cmd.EndSample("Ambient Occlusion Composite");
        }

        public void Release()
        {
            RuntimeUtilities.Destroy(m_Result);
            m_Result = null;
        }
    }
}
167
orrb
openai
C#
using System;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    // Quality presets for the SSR effect; "Custom" leaves the individual settings untouched.
    public enum ScreenSpaceReflectionPreset
    {
        Lower, Low, Medium, High, Higher, Ultra, Overkill, Custom
    }

    // Working resolution of the SSR buffer relative to the screen.
    public enum ScreenSpaceReflectionResolution
    {
        Downsampled, FullSize, Supersampled
    }

    [Serializable]
    public sealed class ScreenSpaceReflectionPresetParameter : ParameterOverride<ScreenSpaceReflectionPreset> { }

    [Serializable]
    public sealed class ScreenSpaceReflectionResolutionParameter : ParameterOverride<ScreenSpaceReflectionResolution> { }

    /// <summary>
    /// Settings for the screen-space reflections effect. Only supported in deferred shading
    /// with motion vectors, compute shaders and texture copy support (see
    /// <see cref="IsEnabledAndSupported"/>).
    /// </summary>
    [Serializable]
    [PostProcess(typeof(ScreenSpaceReflectionsRenderer), "Unity/Screen-space reflections")]
    public sealed class ScreenSpaceReflections : PostProcessEffectSettings
    {
        [Tooltip("Choose a quality preset, or use \"Custom\" to fine tune it. Don't use a preset higher than \"Medium\" if you care about performances on consoles.")]
        public ScreenSpaceReflectionPresetParameter preset = new ScreenSpaceReflectionPresetParameter { value = ScreenSpaceReflectionPreset.Medium };

        [Range(0, 256), Tooltip("Maximum iteration count.")]
        public IntParameter maximumIterationCount = new IntParameter { value = 16 };

        [Tooltip("Changes the size of the SSR buffer. Downsample it to maximize performances or supersample it to get slow but higher quality results.")]
        public ScreenSpaceReflectionResolutionParameter resolution = new ScreenSpaceReflectionResolutionParameter { value = ScreenSpaceReflectionResolution.Downsampled };

        [Range(1f, 64f), Tooltip("Ray thickness. Lower values are more expensive but allow the effect to detect smaller details.")]
        public FloatParameter thickness = new FloatParameter { value = 8f };

        [Tooltip("Maximum distance to traverse after which it will stop drawing reflections.")]
        public FloatParameter maximumMarchDistance = new FloatParameter { value = 100f };

        [Range(0f, 1f), Tooltip("Fades reflections close to the near planes.")]
        public FloatParameter distanceFade = new FloatParameter { value = 0.5f };

        [Range(0f, 1f), Tooltip("Fades reflections close to the screen edges.")]
        public FloatParameter vignette = new FloatParameter { value = 0.5f };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            return enabled
                && context.camera.actualRenderingPath == RenderingPath.DeferredShading
                && SystemInfo.supportsMotionVectors
                && SystemInfo.supportsComputeShaders
                && SystemInfo.copyTextureSupport > CopyTextureSupport.None;
        }
    }

    /// <summary>
    /// Renderer for <see cref="ScreenSpaceReflections"/>. Ray-marches reflections into a
    /// square power-of-two buffer, temporally reprojects them against a persistent history
    /// texture and builds a gaussian mip pyramid used for blurred/rough reflections.
    /// </summary>
    public sealed class ScreenSpaceReflectionsRenderer : PostProcessEffectRenderer<ScreenSpaceReflections>
    {
        RenderTexture m_Resolve;    // persistent resolve target (with mip chain)
        RenderTexture m_History;    // persistent history for temporal reprojection
        int[] m_MipIDs;             // cached shader property ids for the pyramid lods

        // Per-preset overrides applied in Render() when preset != Custom.
        class QualityPreset
        {
            public int maximumIterationCount;
            public float thickness;
            public ScreenSpaceReflectionResolution downsampling;
        }

        // Indexed by (int)ScreenSpaceReflectionPreset; "Custom" has no entry on purpose.
        readonly QualityPreset[] m_Presets =
        {
            new QualityPreset { maximumIterationCount = 10, thickness = 32, downsampling = ScreenSpaceReflectionResolution.Downsampled }, // Lower
            new QualityPreset { maximumIterationCount = 16, thickness = 32, downsampling = ScreenSpaceReflectionResolution.Downsampled }, // Low
            new QualityPreset { maximumIterationCount = 32, thickness = 16, downsampling = ScreenSpaceReflectionResolution.Downsampled }, // Medium
            new QualityPreset { maximumIterationCount = 48, thickness = 8,  downsampling = ScreenSpaceReflectionResolution.Downsampled }, // High
            new QualityPreset { maximumIterationCount = 16, thickness = 32, downsampling = ScreenSpaceReflectionResolution.FullSize }, // Higher
            new QualityPreset { maximumIterationCount = 48, thickness = 16, downsampling = ScreenSpaceReflectionResolution.FullSize }, // Ultra
            new QualityPreset { maximumIterationCount = 128, thickness = 12, downsampling = ScreenSpaceReflectionResolution.Supersampled }, // Overkill
        };

        // Shader pass indices; order must match the SSR shader.
        enum Pass
        {
            Test,
            Resolve,
            Reproject,
            Composite
        }

        public override DepthTextureMode GetCameraFlags()
        {
            // Needs depth for ray-marching and motion vectors for reprojection.
            return DepthTextureMode.Depth | DepthTextureMode.MotionVectors;
        }

        // (Re)creates a persistent render texture when it's missing, lost or mis-sized.
        internal void CheckRT(ref RenderTexture rt, int width, int height, RenderTextureFormat format, FilterMode filterMode, bool useMipMap)
        {
            if (rt == null || !rt.IsCreated() || rt.width != width || rt.height != height)
            {
                if (rt != null)
                    rt.Release();

                rt = new RenderTexture(width, height, 0, format)
                {
                    filterMode = filterMode,
                    useMipMap = useMipMap,
                    autoGenerateMips = false, // mips are filled manually by the compute pyramid
                    hideFlags = HideFlags.HideAndDontSave
                };

                rt.Create();
            }
        }

        public override void Render(PostProcessRenderContext context)
        {
            var cmd = context.command;
            cmd.BeginSample("Screen-space Reflections");

            // Get quality settings
            // NOTE(review): non-Custom presets write back into the shared settings object,
            // overwriting whatever the user had in maximumIterationCount/thickness/resolution.
            if (settings.preset.value != ScreenSpaceReflectionPreset.Custom)
            {
                int id = (int)settings.preset.value;
                settings.maximumIterationCount.value = m_Presets[id].maximumIterationCount;
                settings.thickness.value = m_Presets[id].thickness;
                settings.resolution.value = m_Presets[id].downsampling;
            }

            settings.maximumMarchDistance.value = Mathf.Max(0f, settings.maximumMarchDistance.value);

            // Square POT target, halved or doubled according to the chosen resolution
            int size = Mathf.ClosestPowerOfTwo(Mathf.Min(context.width, context.height));

            if (settings.resolution.value == ScreenSpaceReflectionResolution.Downsampled)
                size >>= 1;
            else if (settings.resolution.value == ScreenSpaceReflectionResolution.Supersampled)
                size <<= 1;

            // The gaussian pyramid compute works in blocks of 8x8 so make sure the last lod has a
            // minimum size of 8x8
            const int kMaxLods = 12;
            int lodCount = Mathf.FloorToInt(Mathf.Log(size, 2f) - 3f);
            lodCount = Mathf.Min(lodCount, kMaxLods);

            CheckRT(ref m_Resolve, size, size, context.sourceFormat, FilterMode.Trilinear, true);

            var noiseTex = context.resources.blueNoise256[0];
            var sheet = context.propertySheets.Get(context.resources.shaders.screenSpaceReflections);
            sheet.properties.SetTexture(ShaderIDs.Noise, noiseTex);

            // Maps clip space [-1,1] to pixel coordinates in the (size x size) SSR buffer
            var screenSpaceProjectionMatrix = new Matrix4x4();
            screenSpaceProjectionMatrix.SetRow(0, new Vector4(size * 0.5f, 0f, 0f, size * 0.5f));
            screenSpaceProjectionMatrix.SetRow(1, new Vector4(0f, size * 0.5f, 0f, size * 0.5f));
            screenSpaceProjectionMatrix.SetRow(2, new Vector4(0f, 0f, 1f, 0f));
            screenSpaceProjectionMatrix.SetRow(3, new Vector4(0f, 0f, 0f, 1f));

            var projectionMatrix = GL.GetGPUProjectionMatrix(context.camera.projectionMatrix, false);
            screenSpaceProjectionMatrix *= projectionMatrix;

            sheet.properties.SetMatrix(ShaderIDs.ViewMatrix, context.camera.worldToCameraMatrix);
            sheet.properties.SetMatrix(ShaderIDs.InverseViewMatrix, context.camera.worldToCameraMatrix.inverse);
            sheet.properties.SetMatrix(ShaderIDs.InverseProjectionMatrix, projectionMatrix.inverse);
            sheet.properties.SetMatrix(ShaderIDs.ScreenSpaceProjectionMatrix, screenSpaceProjectionMatrix);
            sheet.properties.SetVector(ShaderIDs.Params, new Vector4((float)settings.vignette.value, settings.distanceFade.value, settings.maximumMarchDistance.value, lodCount));
            sheet.properties.SetVector(ShaderIDs.Params2, new Vector4((float)context.width / (float)context.height, (float)size / (float)noiseTex.width, settings.thickness.value, settings.maximumIterationCount.value));

            // Ray-march pass into a point-filtered scratch target
            cmd.GetTemporaryRT(ShaderIDs.Test, size, size, 0, FilterMode.Point, context.sourceFormat);
            cmd.BlitFullscreenTriangle(context.source, ShaderIDs.Test, sheet, (int)Pass.Test);

            if (context.isSceneView)
            {
                // Scene view: no temporal reprojection, resolve directly
                cmd.BlitFullscreenTriangle(context.source, m_Resolve, sheet, (int)Pass.Resolve);
            }
            else
            {
                CheckRT(ref m_History, size, size, context.sourceFormat, FilterMode.Bilinear, false);

                // m_ResetHistory comes from the base renderer; seed history with the
                // current frame so the first reprojection has valid data.
                if (m_ResetHistory)
                {
                    context.command.BlitFullscreenTriangle(context.source, m_History);
                    m_ResetHistory = false;
                }

                cmd.GetTemporaryRT(ShaderIDs.SSRResolveTemp, size, size, 0, FilterMode.Bilinear, context.sourceFormat);
                cmd.BlitFullscreenTriangle(context.source, ShaderIDs.SSRResolveTemp, sheet, (int)Pass.Resolve);

                sheet.properties.SetTexture(ShaderIDs.History, m_History);
                cmd.BlitFullscreenTriangle(ShaderIDs.SSRResolveTemp, m_Resolve, sheet, (int)Pass.Reproject);

                // Keep a copy of the reprojected result for next frame
                cmd.CopyTexture(m_Resolve, 0, 0, m_History, 0, 0);
                cmd.ReleaseTemporaryRT(ShaderIDs.SSRResolveTemp);
            }

            cmd.ReleaseTemporaryRT(ShaderIDs.Test);

            // Pre-cache mipmaps ids
            if (m_MipIDs == null || m_MipIDs.Length == 0)
            {
                m_MipIDs = new int[kMaxLods];

                for (int i = 0; i < kMaxLods; i++)
                    m_MipIDs[i] = Shader.PropertyToID("_SSRGaussianMip" + i);
            }

            var compute = context.resources.computeShaders.gaussianDownsample;
            int kernel = compute.FindKernel("KMain");

            var last = new RenderTargetIdentifier(m_Resolve);

            // Build the gaussian pyramid: downsample each lod with the compute kernel and
            // copy it into the matching mip level of m_Resolve.
            for (int i = 0; i < lodCount; i++)
            {
                size >>= 1;
                Assert.IsTrue(size > 0);

                cmd.GetTemporaryRT(m_MipIDs[i], size, size, 0, FilterMode.Bilinear, context.sourceFormat, RenderTextureReadWrite.Default, 1, true);
                cmd.SetComputeTextureParam(compute, kernel, "_Source", last);
                cmd.SetComputeTextureParam(compute, kernel, "_Result", m_MipIDs[i]);
                cmd.SetComputeVectorParam(compute, "_Size", new Vector4(size, size, 1f / size, 1f / size));
                cmd.DispatchCompute(compute, kernel, size / 8, size / 8, 1);
                cmd.CopyTexture(m_MipIDs[i], 0, 0, m_Resolve, 0, i + 1);

                last = m_MipIDs[i];
            }

            for (int i = 0; i < lodCount; i++)
                cmd.ReleaseTemporaryRT(m_MipIDs[i]);

            sheet.properties.SetTexture(ShaderIDs.Resolve, m_Resolve);
            cmd.BlitFullscreenTriangle(context.source, context.destination, sheet, (int)Pass.Composite);
            cmd.EndSample("Screen-space Reflections");
        }

        public override void Release()
        {
            RuntimeUtilities.Destroy(m_Resolve);
            RuntimeUtilities.Destroy(m_History);
            m_Resolve = null;
            m_History = null;
        }
    }
}
243
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// SMAA (subpixel morphological antialiasing), implemented as a three-pass chain:
    /// edge detection, blend-weight computation and neighborhood blending.
    /// </summary>
    [Serializable]
    public sealed class SubpixelMorphologicalAntialiasing
    {
        // Shader pass indices; order must match the SMAA shader.
        enum Pass
        {
            EdgeDetection,
            BlendWeights,
            NeighborhoodBlending
        }

        // Not compatible with single-pass stereo rendering.
        public bool IsSupported()
        {
            return !RuntimeUtilities.isSinglePassStereoEnabled;
        }

        internal void Render(PostProcessRenderContext context)
        {
            var command = context.command;
            var propertySheet = context.propertySheets.Get(context.resources.shaders.subpixelMorphologicalAntialiasing);

            // Precomputed SMAA lookup textures consumed by the blend-weight pass
            propertySheet.properties.SetTexture("_AreaTex", context.resources.smaaLuts.area);
            propertySheet.properties.SetTexture("_SearchTex", context.resources.smaaLuts.search);

            command.BeginSample("SubpixelMorphologicalAntialiasing");

            // Two full-size scratch targets to ping-pong between passes
            command.GetTemporaryRT(ShaderIDs.SMAA_Flip, context.width, context.height, 0, FilterMode.Bilinear, context.sourceFormat, RenderTextureReadWrite.Linear);
            command.GetTemporaryRT(ShaderIDs.SMAA_Flop, context.width, context.height, 0, FilterMode.Bilinear, context.sourceFormat, RenderTextureReadWrite.Linear);

            // 1) Detect edges in the source image
            command.BlitFullscreenTriangle(context.source, ShaderIDs.SMAA_Flip, propertySheet, (int)Pass.EdgeDetection, true);

            // 2) Derive blending weights from the detected edges
            command.BlitFullscreenTriangle(ShaderIDs.SMAA_Flip, ShaderIDs.SMAA_Flop, propertySheet, (int)Pass.BlendWeights);
            command.SetGlobalTexture("_BlendTex", ShaderIDs.SMAA_Flop);

            // 3) Blend neighborhood pixels using those weights
            command.BlitFullscreenTriangle(context.source, context.destination, propertySheet, (int)Pass.NeighborhoodBlending);

            command.ReleaseTemporaryRT(ShaderIDs.SMAA_Flip);
            command.ReleaseTemporaryRT(ShaderIDs.SMAA_Flop);

            command.EndSample("SubpixelMorphologicalAntialiasing");
        }
    }
}
44
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Temporal antialiasing: jitters the camera projection matrix each frame and blends the
    /// current frame with a reprojected per-eye history buffer.
    /// </summary>
    [Serializable]
    public sealed class TemporalAntialiasing
    {
        [Tooltip("The diameter (in texels) inside which jitter samples are spread. Smaller values result in crisper but more aliased output, while larger values result in more stable but blurrier output.")]
        [Range(0.1f, 1f)]
        public float jitterSpread = 0.75f;

        [Tooltip("Controls the amount of sharpening applied to the color buffer. High values may introduce dark-border artifacts.")]
        [Range(0f, 3f)]
        public float sharpness = 0.25f;

        [Tooltip("The blend coefficient for a stationary fragment. Controls the percentage of history sample blended into the final color.")]
        [Range(0f, 0.99f)]
        public float stationaryBlending = 0.95f;

        [Tooltip("The blend coefficient for a fragment with significant motion. Controls the percentage of history sample blended into the final color.")]
        [Range(0f, 0.99f)]
        public float motionBlending = 0.85f;

        // For custom jittered matrices - use at your own risks
        public Func<Camera, Vector2, Matrix4x4> jitteredMatrixFunc;

        // Current-frame jitter, normalized to pixel size after the projection matrix is built.
        public Vector2 jitter { get; private set; }

        // Shader pass indices; the dilated solver is used for perspective cameras.
        enum Pass
        {
            SolverDilate,
            SolverNoDilate
        }

        readonly RenderTargetIdentifier[] m_Mrt = new RenderTargetIdentifier[2];
        bool m_ResetHistory = true;

        const int k_SampleCount = 8;
        int m_SampleIndex; // rolling index into the Halton sequence

        // Ping-pong between two history textures as we can't read & write the same target in the
        // same pass
        const int k_NumEyes = 2;
        const int k_NumHistoryTextures = 2;
        readonly RenderTexture[][] m_HistoryTextures = new RenderTexture[k_NumEyes][];

        int[] m_HistoryPingPong = new int [k_NumEyes];

        public bool IsSupported()
        {
            return SystemInfo.supportedRenderTargetCount >= 2 // the solver writes to 2 targets (MRT)
                && SystemInfo.supportsMotionVectors
#if !UNITY_2017_3_OR_NEWER
                && !RuntimeUtilities.isVREnabled
#endif
                && SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
        }

        internal DepthTextureMode GetCameraFlags()
        {
            return DepthTextureMode.Depth | DepthTextureMode.MotionVectors;
        }

        // Discards the history buffers on the next Render/CheckHistory call.
        internal void ResetHistory()
        {
            m_ResetHistory = true;
        }

        Vector2 GenerateRandomOffset()
        {
            // The variance between 0 and the actual halton sequence values reveals noticeable instability
            // in Unity's shadow maps, so we avoid index 0.
            var offset = new Vector2(
                    HaltonSeq.Get((m_SampleIndex & 1023) + 1, 2) - 0.5f,
                    HaltonSeq.Get((m_SampleIndex & 1023) + 1, 3) - 0.5f
                );

            if (++m_SampleIndex >= k_SampleCount)
                m_SampleIndex = 0;

            return offset;
        }

        // Builds a projection matrix offset by the current jitter sample; also updates
        // the public `jitter` property (normalized to pixel size) as a side effect.
        public Matrix4x4 GetJitteredProjectionMatrix(Camera camera)
        {
            Matrix4x4 cameraProj;
            jitter = GenerateRandomOffset();
            jitter *= jitterSpread;

            if (jitteredMatrixFunc != null)
            {
                cameraProj = jitteredMatrixFunc(camera, jitter);
            }
            else
            {
                cameraProj = camera.orthographic
                    ? RuntimeUtilities.GetJitteredOrthographicProjectionMatrix(camera, jitter)
                    : RuntimeUtilities.GetJitteredPerspectiveProjectionMatrix(camera, jitter);
            }

            jitter = new Vector2(jitter.x / camera.pixelWidth, jitter.y / camera.pixelHeight);
            return cameraProj;
        }

        // Saves the unjittered matrix and installs the jittered one on the camera.
        public void ConfigureJitteredProjectionMatrix(PostProcessRenderContext context)
        {
            var camera = context.camera;
            camera.nonJitteredProjectionMatrix = camera.projectionMatrix;
            camera.projectionMatrix = GetJitteredProjectionMatrix(camera);
            camera.useJitteredProjectionMatrixForTransparentRendering = false;
        }

        // TODO: We'll probably need to isolate most of this for SRPs
        public void ConfigureStereoJitteredProjectionMatrices(PostProcessRenderContext context)
        {
#if UNITY_2017_3_OR_NEWER
            var camera = context.camera;
            jitter = GenerateRandomOffset();
            jitter *= jitterSpread;

            for (var eye = Camera.StereoscopicEye.Left; eye <= Camera.StereoscopicEye.Right; eye++)
            {
                // This saves off the device generated projection matrices as non-jittered
                context.camera.CopyStereoDeviceProjectionMatrixToNonJittered(eye);
                var originalProj = context.camera.GetStereoNonJitteredProjectionMatrix(eye);

                // Currently no support for custom jitter func, as VR devices would need to provide
                // original projection matrix as input along with jitter
                var jitteredMatrix = RuntimeUtilities.GenerateJitteredProjectionMatrixFromOriginal(context, originalProj, jitter);
                context.camera.SetStereoProjectionMatrix(eye, jitteredMatrix);
            }

            // jitter has to be scaled for the actual eye texture size, not just the intermediate texture size
            // which could be double-wide in certain stereo rendering scenarios
            jitter = new Vector2(jitter.x / context.screenWidth, jitter.y / context.screenHeight);
            camera.useJitteredProjectionMatrixForTransparentRendering = false;
#endif
        }

        void GenerateHistoryName(RenderTexture rt, int id, PostProcessRenderContext context)
        {
            rt.name = "Temporal Anti-aliasing History id #" + id;

            if (context.stereoActive)
                rt.name += " for eye " + context.xrActiveEye;
        }

        // Returns the history texture for the given slot, (re)creating it when missing,
        // invalidated, or mismatched in size (the latter copies the old content over).
        RenderTexture CheckHistory(int id, PostProcessRenderContext context)
        {
            int activeEye = context.xrActiveEye;

            if (m_HistoryTextures[activeEye] == null)
                m_HistoryTextures[activeEye] = new RenderTexture[k_NumHistoryTextures];

            var rt = m_HistoryTextures[activeEye][id];

            if (m_ResetHistory || rt == null || !rt.IsCreated())
            {
                RenderTexture.ReleaseTemporary(rt);

                rt = context.GetScreenSpaceTemporaryRT(0, context.sourceFormat);
                GenerateHistoryName(rt, id, context);

                rt.filterMode = FilterMode.Bilinear;
                m_HistoryTextures[activeEye][id] = rt;

                // Seed the fresh history with the current frame
                context.command.BlitFullscreenTriangle(context.source, rt);
            }
            else if (rt.width != context.width || rt.height != context.height)
            {
                // On size change, simply copy the old history to the new one. This looks better
                // than completely discarding the history and seeing a few aliased frames.
                var rt2 = context.GetScreenSpaceTemporaryRT(0, context.sourceFormat);
                GenerateHistoryName(rt2, id, context);

                rt2.filterMode = FilterMode.Bilinear;
                m_HistoryTextures[activeEye][id] = rt2;

                context.command.BlitFullscreenTriangle(rt, rt2);
                RenderTexture.ReleaseTemporary(rt);
            }

            return m_HistoryTextures[activeEye][id];
        }

        internal void Render(PostProcessRenderContext context)
        {
            var sheet = context.propertySheets.Get(context.resources.shaders.temporalAntialiasing);

            var cmd = context.command;
            cmd.BeginSample("TemporalAntialiasing");

            // Ping-pong: this frame reads slot (pp+1)%2 and writes slot pp%2; the stored
            // index then advances so next frame reads what was just written.
            int pp = m_HistoryPingPong[context.xrActiveEye];
            var historyRead = CheckHistory(++pp % 2, context);
            var historyWrite = CheckHistory(++pp % 2, context);
            m_HistoryPingPong[context.xrActiveEye] = ++pp % 2;

            const float kMotionAmplification = 100f * 60f;
            sheet.properties.SetVector(ShaderIDs.Jitter, jitter);
            sheet.properties.SetFloat(ShaderIDs.Sharpness, sharpness);
            sheet.properties.SetVector(ShaderIDs.FinalBlendParameters, new Vector4(stationaryBlending, motionBlending, kMotionAmplification, 0f));
            sheet.properties.SetTexture(ShaderIDs.HistoryTex, historyRead);

            // TODO: Account for different possible RenderViewportScale value from previous frame...
            int pass = context.camera.orthographic ? (int)Pass.SolverNoDilate : (int)Pass.SolverDilate;
            m_Mrt[0] = context.destination;
            m_Mrt[1] = historyWrite;

            // Single MRT pass: resolves to the destination and the next history at once
            cmd.BlitFullscreenTriangle(context.source, m_Mrt, context.source, sheet, pass);
            cmd.EndSample("TemporalAntialiasing");

            m_ResetHistory = false;
        }

        internal void Release()
        {
            if (m_HistoryTextures != null)
            {
                for (int i = 0; i < m_HistoryTextures.Length; i++)
                {
                    if (m_HistoryTextures[i] == null)
                        continue;

                    for (int j = 0; j < m_HistoryTextures[i].Length; j++)
                    {
                        RenderTexture.ReleaseTemporary(m_HistoryTextures[i][j]);
                        m_HistoryTextures[i][j] = null;
                    }

                    m_HistoryTextures[i] = null;
                }
            }

            m_SampleIndex = 0;
            m_HistoryPingPong[0] = 0;
            m_HistoryPingPong[1] = 0;

            ResetHistory();
        }
    }
}
243
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    // Classic = procedural vignette, Masked = user-provided texture mask.
    public enum VignetteMode
    {
        Classic,
        Masked
    }

    [Serializable]
    public sealed class VignetteModeParameter : ParameterOverride<VignetteMode> {}

    /// <summary>
    /// Settings for the vignette effect, rendered as part of the uber shader pass.
    /// </summary>
    [Serializable]
    [PostProcess(typeof(VignetteRenderer), "Unity/Vignette")]
    public sealed class Vignette : PostProcessEffectSettings
    {
        [Tooltip("Use the \"Classic\" mode for parametric controls. Use the \"Masked\" mode to use your own texture mask.")]
        public VignetteModeParameter mode = new VignetteModeParameter { value = VignetteMode.Classic };

        [Tooltip("Vignette color. Use the alpha channel for transparency.")]
        public ColorParameter color = new ColorParameter { value = new Color(0f, 0f, 0f, 1f) };

        [Tooltip("Sets the vignette center point (screen center is [0.5,0.5]).")]
        public Vector2Parameter center = new Vector2Parameter { value = new Vector2(0.5f, 0.5f) };

        [Range(0f, 1f), Tooltip("Amount of vignetting on screen.")]
        public FloatParameter intensity = new FloatParameter { value = 0f };

        [Range(0.01f, 1f), Tooltip("Smoothness of the vignette borders.")]
        public FloatParameter smoothness = new FloatParameter { value = 0.2f };

        [Range(0f, 1f), Tooltip("Lower values will make a square-ish vignette.")]
        public FloatParameter roundness = new FloatParameter { value = 1f };

        [Tooltip("Should the vignette be perfectly round or be dependent on the current aspect ratio?")]
        public BoolParameter rounded = new BoolParameter { value = false };

        [Tooltip("A black and white mask to use as a vignette.")]
        public TextureParameter mask = new TextureParameter { value = null };

        [Range(0f, 1f), Tooltip("Mask opacity.")]
        public FloatParameter opacity = new FloatParameter { value = 1f };

        public override bool IsEnabledAndSupported(PostProcessRenderContext context)
        {
            // Active only when the current mode actually produces a visible result
            return enabled.value
                && ((mode.value == VignetteMode.Classic && intensity.value > 0f)
                 || (mode.value == VignetteMode.Masked && opacity.value > 0f && mask.value != null));
        }
    }

    /// <summary>
    /// Feeds the vignette parameters into the uber shader; no dedicated blit of its own.
    /// </summary>
    public sealed class VignetteRenderer : PostProcessEffectRenderer<Vignette>
    {
        public override void Render(PostProcessRenderContext context)
        {
            var sheet = context.uberSheet;
            sheet.EnableKeyword("VIGNETTE");
            sheet.properties.SetColor(ShaderIDs.Vignette_Color, settings.color.value);

            if (settings.mode == VignetteMode.Classic)
            {
                sheet.properties.SetFloat(ShaderIDs.Vignette_Mode, 0f);
                sheet.properties.SetVector(ShaderIDs.Vignette_Center, settings.center.value);

                // Remaps roundness [0,1] to [6,1]; intensity/smoothness are scaled to the
                // ranges the uber shader expects (assumed from the shader side — TODO confirm)
                float roundness = (1f - settings.roundness.value) * 6f + settings.roundness.value;
                sheet.properties.SetVector(ShaderIDs.Vignette_Settings, new Vector4(settings.intensity.value * 3f, settings.smoothness.value * 5f, roundness, settings.rounded.value ? 1f : 0f));
            }
            else // Masked
            {
                sheet.properties.SetFloat(ShaderIDs.Vignette_Mode, 1f);
                sheet.properties.SetTexture(ShaderIDs.Vignette_Mask, settings.mask.value);
                sheet.properties.SetFloat(ShaderIDs.Vignette_Opacity, Mathf.Clamp01(settings.opacity.value));
            }
        }
    }
}
77
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Gamma histogram debug monitor. Accumulates per-bin pixel counts in a compute buffer
    /// from the half-resolution frame copy, then renders them into the output texture.
    /// </summary>
    [Serializable]
    public sealed class HistogramMonitor : Monitor
    {
        // Which color channel is histogrammed.
        public enum Channel
        {
            Red,
            Green,
            Blue,
            Master
        }

        public int width = 512;            // output texture width
        public int height = 256;           // output texture height
        public Channel channel = Channel.Master;

        ComputeBuffer m_Data;              // one uint counter per bin
        int m_NumBins;
        int m_ThreadGroupSizeX;
        int m_ThreadGroupSizeY;

        internal override void OnEnable()
        {
            m_ThreadGroupSizeX = 16;

            // Reduced bin count / group size on Android GL to match the compute kernels there
            if (RuntimeUtilities.isAndroidOpenGL)
            {
                m_NumBins = 128;
                m_ThreadGroupSizeY = 8;
            }
            else
            {
                m_NumBins = 256;
                m_ThreadGroupSizeY = 16;
            }
        }

        internal override void OnDisable()
        {
            base.OnDisable();

            if (m_Data != null)
                m_Data.Release();

            m_Data = null;
        }

        // Samples the half-resolution copy of the frame.
        internal override bool NeedsHalfRes()
        {
            return true;
        }

        internal override void Render(PostProcessRenderContext context)
        {
            CheckOutput(width, height);

            if (m_Data == null)
                m_Data = new ComputeBuffer(m_NumBins, sizeof(uint));

            var compute = context.resources.computeShaders.gammaHistogram;
            var cmd = context.command;
            cmd.BeginSample("GammaHistogram");

            // Clear the buffer on every frame as we use it to accumulate values on every frame
            int kernel = compute.FindKernel("KHistogramClear");
            cmd.SetComputeBufferParam(compute, kernel, "_HistogramBuffer", m_Data);
            cmd.DispatchCompute(compute, kernel, Mathf.CeilToInt(m_NumBins / (float)m_ThreadGroupSizeX), 1, 1);

            // Gather all pixels and fill in our histogram
            kernel = compute.FindKernel("KHistogramGather");
            var parameters = new Vector4(
                context.width / 2,     // half-res source dimensions (integer division intended)
                context.height / 2,
                RuntimeUtilities.isLinearColorSpace ? 1 : 0,
                (int)channel
            );

            cmd.SetComputeVectorParam(compute, "_Params", parameters);
            cmd.SetComputeTextureParam(compute, kernel, "_Source", ShaderIDs.HalfResFinalCopy);
            cmd.SetComputeBufferParam(compute, kernel, "_HistogramBuffer", m_Data);
            cmd.DispatchCompute(compute, kernel,
                Mathf.CeilToInt(parameters.x / m_ThreadGroupSizeX),
                Mathf.CeilToInt(parameters.y / m_ThreadGroupSizeY),
                1
            );

            // Generate the histogram texture
            var sheet = context.propertySheets.Get(context.resources.shaders.gammaHistogram);
            sheet.properties.SetVector(ShaderIDs.Params, new Vector4(width, height, 0f, 0f));
            sheet.properties.SetBuffer(ShaderIDs.HistogramBuffer, m_Data);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, output, sheet, 0);

            cmd.EndSample("GammaHistogram");
        }
    }
}
100
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Light meter debug monitor. Visualizes the log histogram computed elsewhere in the
    /// pipeline, optionally overlaying the HDR grading curves and auto-exposure bounds.
    /// </summary>
    [Serializable]
    public sealed class LightMeterMonitor : Monitor
    {
        public int width = 512;   // output texture width
        public int height = 256;  // output texture height

        // Note: only works with HDR grading, as this monitor only makes sense when working in HDR
        public bool showCurves = true;

        internal override void Render(PostProcessRenderContext context)
        {
            CheckOutput(width, height);

            // Histogram data is produced upstream and exposed through the context
            var histogram = context.logHistogram;

            var sheet = context.propertySheets.Get(context.resources.shaders.lightMeter);
            sheet.ClearKeywords();
            sheet.properties.SetBuffer(ShaderIDs.HistogramBuffer, histogram.data);

            var scaleOffsetRes = histogram.GetHistogramScaleOffsetRes(context);
            scaleOffsetRes.z = 1f / width;
            scaleOffsetRes.w = 1f / height;
            sheet.properties.SetVector(ShaderIDs.ScaleOffsetRes, scaleOffsetRes);

            // Overlay grading curves only when an HDR log lut is available
            if (context.logLut != null && showCurves)
            {
                sheet.EnableKeyword("COLOR_GRADING_HDR");
                sheet.properties.SetTexture(ShaderIDs.Lut3D, context.logLut);
            }

            var autoExpo = context.autoExposure;
            if (autoExpo != null)
            {
                // Make sure filtering values are correct to avoid apocalyptic consequences
                float lowPercent = autoExpo.filtering.value.x;
                float highPercent = autoExpo.filtering.value.y;
                const float kMinDelta = 1e-2f;
                highPercent = Mathf.Clamp(highPercent, 1f + kMinDelta, 99f);
                lowPercent = Mathf.Clamp(lowPercent, 1f, highPercent - kMinDelta);

                // x/y: percentile bounds as [0,1]; z/w: min/max luminance in linear space
                var parameters = new Vector4(
                    lowPercent * 0.01f,
                    highPercent * 0.01f,
                    RuntimeUtilities.Exp2(autoExpo.minLuminance.value),
                    RuntimeUtilities.Exp2(autoExpo.maxLuminance.value)
                );

                sheet.EnableKeyword("AUTO_EXPOSURE");
                sheet.properties.SetVector(ShaderIDs.Params, parameters);
            }

            var cmd = context.command;
            cmd.BeginSample("LightMeter");
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, output, sheet, 0);
            cmd.EndSample("LightMeter");
        }
    }
}
64
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Identifies a debug monitor kind.
    public enum MonitorType
    {
        LightMeter,
        Histogram,
        Waveform,
        Vectorscope
    }

    /// <summary>
    /// Base class for debug monitors. Owns the render texture the monitor draws into and
    /// defines the hooks concrete monitors implement.
    /// </summary>
    public abstract class Monitor
    {
        /// <summary>The texture this monitor renders its visualization into.</summary>
        public RenderTexture output { get; protected set; }

        // Set externally when this monitor should be rendered this frame.
        internal bool requested = false;

        // All monitors rely on compute shaders, so support is gated on that capability.
        public bool IsRequestedAndSupported()
        {
            return requested && SystemInfo.supportsComputeShaders;
        }

        // Monitors that sample the half-resolution frame copy override this.
        internal virtual bool NeedsHalfRes()
        {
            return false;
        }

        // (Re)allocates the output texture whenever it is missing, lost or mis-sized.
        protected void CheckOutput(int width, int height)
        {
            bool isValid = output != null
                && output.IsCreated()
                && output.width == width
                && output.height == height;

            if (isValid)
                return;

            RuntimeUtilities.Destroy(output);
            output = new RenderTexture(width, height, 0, RenderTextureFormat.ARGB32)
            {
                anisoLevel = 0,
                filterMode = FilterMode.Bilinear,
                wrapMode = TextureWrapMode.Clamp,
                useMipMap = false
            };
        }

        internal virtual void OnEnable()
        {
        }

        internal virtual void OnDisable()
        {
            RuntimeUtilities.Destroy(output);
        }

        internal abstract void Render(PostProcessRenderContext context);
    }
}
55
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Vectorscope debug monitor. Accumulates chromaticity counts into a size*size compute
    /// buffer from the half-resolution frame copy, then renders them to the output texture.
    /// </summary>
    [Serializable]
    public sealed class VectorscopeMonitor : Monitor
    {
        public int size = 256;         // output is a square of size*size
        public float exposure = 0.12f; // display exposure; clamped to >= 0 in Render()

        ComputeBuffer m_Data;          // one uint counter per output cell
        int m_ThreadGroupSizeX;
        int m_ThreadGroupSizeY;

        internal override void OnEnable()
        {
            m_ThreadGroupSizeX = 16;
            // Smaller Y group size on Android GL to match the compute kernels there
            m_ThreadGroupSizeY = RuntimeUtilities.isAndroidOpenGL ? 8 : 16;
        }

        internal override void OnDisable()
        {
            base.OnDisable();

            if (m_Data != null)
                m_Data.Release();

            m_Data = null;
        }

        // Samples the half-resolution copy of the frame.
        internal override bool NeedsHalfRes()
        {
            return true;
        }

        internal override void Render(PostProcessRenderContext context)
        {
            CheckOutput(size, size);

            exposure = Mathf.Max(0f, exposure);

            // Reallocate the accumulation buffer when `size` changed
            int count = size * size;
            if (m_Data == null)
                m_Data = new ComputeBuffer(count, sizeof(uint));
            else if (m_Data.count != count)
            {
                m_Data.Release();
                m_Data = new ComputeBuffer(count, sizeof(uint));
            }

            var compute = context.resources.computeShaders.vectorscope;
            var cmd = context.command;
            cmd.BeginSample("Vectorscope");

            // x/y: half-res source dimensions (integer division intended), z: buffer size
            var parameters = new Vector4(
                context.width / 2,
                context.height / 2,
                size,
                RuntimeUtilities.isLinearColorSpace ? 1 : 0
            );

            // Clear the buffer on every frame as we use it to accumulate values on every frame
            int kernel = compute.FindKernel("KVectorscopeClear");
            cmd.SetComputeBufferParam(compute, kernel, "_VectorscopeBuffer", m_Data);
            cmd.SetComputeVectorParam(compute, "_Params", parameters);
            cmd.DispatchCompute(compute, kernel,
                Mathf.CeilToInt(size / (float)m_ThreadGroupSizeX),
                Mathf.CeilToInt(size / (float)m_ThreadGroupSizeY),
                1
            );

            // Gather all pixels and fill in our histogram
            kernel = compute.FindKernel("KVectorscopeGather");
            cmd.SetComputeBufferParam(compute, kernel, "_VectorscopeBuffer", m_Data);
            cmd.SetComputeTextureParam(compute, kernel, "_Source", ShaderIDs.HalfResFinalCopy);
            cmd.DispatchCompute(compute, kernel,
                Mathf.CeilToInt(parameters.x / m_ThreadGroupSizeX),
                Mathf.CeilToInt(parameters.y / m_ThreadGroupSizeY),
                1
            );

            // Generate the histogram texture
            var sheet = context.propertySheets.Get(context.resources.shaders.vectorscope);
            sheet.properties.SetVector(ShaderIDs.Params, new Vector4(size, size, exposure, 0f));
            sheet.properties.SetBuffer(ShaderIDs.VectorscopeBuffer, m_Data);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, output, sheet, 0);

            cmd.EndSample("Vectorscope");
        }
    }
}
91
orrb
openai
C#
using System;

namespace UnityEngine.Rendering.PostProcessing
{
    /// <summary>
    /// Waveform debug monitor. Accumulates per-column luminance data in a compute buffer
    /// from a downscaled copy of the frame, then renders it to the output texture.
    /// </summary>
    [Serializable]
    public sealed class WaveformMonitor : Monitor
    {
        public float exposure = 0.12f; // display exposure; clamped to >= 0 in Render()
        public int height = 256;       // output height; width follows the frame aspect ratio

        ComputeBuffer m_Data;
        int m_ThreadGroupSize;
        int m_ThreadGroupSizeX;
        int m_ThreadGroupSizeY;

        internal override void OnEnable()
        {
            m_ThreadGroupSizeX = 16;

            // Reduced group sizes on Android GL to match the compute kernels there
            if (RuntimeUtilities.isAndroidOpenGL)
            {
                m_ThreadGroupSize = 128;
                m_ThreadGroupSizeY = 8;
            }
            else
            {
                m_ThreadGroupSize = 256;
                m_ThreadGroupSizeY = 16;
            }
        }

        internal override void OnDisable()
        {
            base.OnDisable();

            if (m_Data != null)
                m_Data.Release();

            m_Data = null;
        }

        // Samples the half-resolution copy of the frame.
        internal override bool NeedsHalfRes()
        {
            return true;
        }

        internal override void Render(PostProcessRenderContext context)
        {
            // Waveform show localized data, so width depends on the aspect ratio
            float ratio = (context.width / 2f) / (context.height / 2f);
            int width = Mathf.FloorToInt(height * ratio);

            CheckOutput(width, height);

            exposure = Mathf.Max(0f, exposure);

            // Grow-only reallocation: a larger existing buffer is reused as-is.
            // Stride is sizeof(uint) << 2 == 16 bytes — presumably a uint4 per element
            // on the shader side; TODO confirm against the compute kernel.
            int count = width * height;
            if (m_Data == null)
            {
                m_Data = new ComputeBuffer(count, sizeof(uint) << 2);
            }
            else if (m_Data.count < count)
            {
                m_Data.Release();
                m_Data = new ComputeBuffer(count, sizeof(uint) << 2);
            }

            var compute = context.resources.computeShaders.waveform;
            var cmd = context.command;
            cmd.BeginSample("Waveform");

            var parameters = new Vector4(
                width,
                height,
                RuntimeUtilities.isLinearColorSpace ? 1 : 0,
                0f
            );

            // Clear the buffer on every frame
            int kernel = compute.FindKernel("KWaveformClear");
            cmd.SetComputeBufferParam(compute, kernel, "_WaveformBuffer", m_Data);
            cmd.SetComputeVectorParam(compute, "_Params", parameters);
            cmd.DispatchCompute(compute, kernel,
                Mathf.CeilToInt(width / (float)m_ThreadGroupSizeX),
                Mathf.CeilToInt(height / (float)m_ThreadGroupSizeY),
                1);

            // For performance reasons, especially on consoles, we'll just downscale the source
            // again to reduce VMEM stalls. Eventually the whole algorithm needs to be rewritten as
            // it's currently pretty naive.
            cmd.GetTemporaryRT(ShaderIDs.WaveformSource, width, height, 0, FilterMode.Bilinear, context.sourceFormat);
            cmd.BlitFullscreenTriangle(ShaderIDs.HalfResFinalCopy, ShaderIDs.WaveformSource);

            // Gather all pixels and fill in our waveform
            kernel = compute.FindKernel("KWaveformGather");
            cmd.SetComputeBufferParam(compute, kernel, "_WaveformBuffer", m_Data);
            cmd.SetComputeTextureParam(compute, kernel, "_Source", ShaderIDs.WaveformSource);
            cmd.SetComputeVectorParam(compute, "_Params", parameters);
            cmd.DispatchCompute(compute, kernel, width, Mathf.CeilToInt(height / (float)m_ThreadGroupSize), 1);
            cmd.ReleaseTemporaryRT(ShaderIDs.WaveformSource);

            // Generate the waveform texture
            var sheet = context.propertySheets.Get(context.resources.shaders.waveform);
            sheet.properties.SetVector(ShaderIDs.Params, new Vector4(width, height, exposure, 0f));
            sheet.properties.SetBuffer(ShaderIDs.WaveformBuffer, m_Data);
            cmd.BlitFullscreenTriangle(BuiltinRenderTextureType.None, output, sheet, 0);

            cmd.EndSample("Waveform");
        }
    }
}
109
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Color-science helpers shared by the grading/white-balance effects.
    public static class ColorUtilities
    {
        // An analytical model of chromaticity of the standard illuminant, by Judd et al.
        // http://en.wikipedia.org/wiki/Standard_illuminant#Illuminant_series_D
        // Slightly modifed to adjust it with the D65 white point (x=0.31271, y=0.32902).
        public static float StandardIlluminantY(float x)
        {
            return 2.87f * x - 3f * x * x - 0.27509507f;
        }

        // CIE xy chromaticity to CAT02 LMS.
        // http://en.wikipedia.org/wiki/LMS_color_space#CAT02
        public static Vector3 CIExyToLMS(float x, float y)
        {
            float Y = 1f;
            float X = Y * x / y;
            float Z = Y * (1f - x - y) / y;

            // CAT02 transform matrix applied row by row
            return new Vector3(
                0.7328f * X + 0.4296f * Y - 0.1624f * Z,
                -0.7036f * X + 1.6975f * Y + 0.0061f * Z,
                0.0030f * X + 0.0136f * Y + 0.9834f * Z
            );
        }

        // Builds per-channel LMS multipliers that shift the white point by the given
        // temperature/tint offsets (both expressed in the UI range, ~[-100;100]).
        public static Vector3 ComputeColorBalance(float temperature, float tint)
        {
            // Range ~[-1.67;1.67] works best
            float tempShift = temperature / 60f;
            float tintShift = tint / 60f;

            // Get the CIE xy chromaticity of the reference white point.
            // Note: 0.31271 = x value on the D65 white point
            float x;
            if (tempShift < 0f)
                x = 0.31271f - tempShift * 0.1f;
            else
                x = 0.31271f - tempShift * 0.05f;
            float y = StandardIlluminantY(x) + tintShift * 0.05f;

            // Calculate the coefficients in the LMS space.
            var refWhite = new Vector3(0.949237f, 1.03542f, 1.08728f); // D65 white point
            var dstWhite = CIExyToLMS(x, y);
            return new Vector3(refWhite.x / dstWhite.x, refWhite.y / dstWhite.y, refWhite.z / dstWhite.z);
        }

        // Alpha/w is offset
        public static Vector3 ColorToLift(Vector4 color)
        {
            // Shadows: remove the luma component, keep only the color shift + offset
            float luma = color.x * 0.2126f + color.y * 0.7152f + color.z * 0.0722f;
            var shadows = new Vector3(color.x - luma, color.y - luma, color.z - luma);
            return new Vector3(shadows.x + color.w, shadows.y + color.w, shadows.z + color.w);
        }

        // Alpha/w is offset
        public static Vector3 ColorToInverseGamma(Vector4 color)
        {
            // Midtones: luma-free color shift turned into a reciprocal gamma exponent
            float luma = color.x * 0.2126f + color.y * 0.7152f + color.z * 0.0722f;
            var midtones = new Vector3(color.x - luma, color.y - luma, color.z - luma);

            float offset = color.w + 1f;
            // Clamp the denominator so a fully-crushed channel can't divide by zero
            return new Vector3(
                1f / Mathf.Max(midtones.x + offset, 1e-03f),
                1f / Mathf.Max(midtones.y + offset, 1e-03f),
                1f / Mathf.Max(midtones.z + offset, 1e-03f)
            );
        }

        // Alpha/w is offset
        public static Vector3 ColorToGain(Vector4 color)
        {
            // Highlights: luma-free color shift plus offset
            float luma = color.x * 0.2126f + color.y * 0.7152f + color.z * 0.0722f;
            var highlights = new Vector3(color.x - luma, color.y - luma, color.z - luma);

            float offset = color.w + 1f;
            return new Vector3(highlights.x + offset, highlights.y + offset, highlights.z + offset);
        }

        // Alexa LogC converters (El 1000)
        // See http://www.vocas.nl/webfm_send/964
        const float logC_cut = 0.011361f;
        const float logC_a = 5.555556f;
        const float logC_b = 0.047996f;
        const float logC_c = 0.244161f;
        const float logC_d = 0.386036f;
        const float logC_e = 5.301883f;
        const float logC_f = 0.092819f;

        // LogC-encoded value -> scene-linear (log segment above the cut, linear below)
        public static float LogCToLinear(float x)
        {
            if (x > logC_e * logC_cut + logC_f)
                return (Mathf.Pow(10f, (x - logC_d) / logC_c) - logC_b) / logC_a;

            return (x - logC_f) / logC_e;
        }

        // Scene-linear value -> LogC encoding (inverse of LogCToLinear)
        public static float LinearToLogC(float x)
        {
            if (x > logC_cut)
                return logC_c * Mathf.Log10(logC_a * x + logC_b) + logC_d;

            return logC_e * x + logC_f;
        }

        // Packs a color into a 0xAARRGGBB integer.
        // NOTE(review): channels are not clamped to [0;1] before the cast — out-of-range
        // (HDR/negative) inputs produce unspecified bits; confirm callers only pass LDR colors.
        public static uint ToHex(Color c)
        {
            uint a = (uint)(c.a * 255) << 24;
            uint r = (uint)(c.r * 255) << 16;
            uint g = (uint)(c.g * 255) << 8;
            uint b = (uint)(c.b * 255);
            return a | r | g | b;
        }

        // Unpacks a 0xAARRGGBB integer back into a Color.
        public static Color ToRGBA(uint hex)
        {
            float r = ((hex >> 16) & 0xff) / 255f;
            float g = ((hex >> 8) & 0xff) / 255f;
            float b = (hex & 0xff) / 255f;
            float a = ((hex >> 24) & 0xff) / 255f;
            return new Color(r, g, b, a);
        }
    }
}
128
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Raw, mostly unoptimized implementation of Hable's artist-friendly tonemapping curve
    // http://filmicworlds.com/blog/filmic-tonemapping-with-piecewise-power-curves/
    //
    // The curve is built from three power-function segments (toe, linear mid, shoulder)
    // fitted from artist parameters by Init(); Eval() then maps a linear HDR value to [0;1].
    public class HableCurve
    {
        // One piecewise segment: y = exp(lnA + B*ln((x - offsetX) * scaleX)) * scaleY + offsetY
        public class Segment
        {
            public float offsetX;
            public float offsetY;
            public float scaleX;
            public float scaleY;
            public float lnA;
            public float B;

            // Evaluates this segment at x (in normalized [0;1] curve space).
            public float Eval(float x)
            {
                float x0 = (x - offsetX) * scaleX;
                float y0 = 0f;

                // log(0) is undefined but our function should evaluate to 0. There are better ways to handle this,
                // but it's doing it the slow way here for clarity.
                if (x0 > 0)
                    y0 = Mathf.Exp(lnA + B * Mathf.Log(x0));

                return y0 * scaleY + offsetY;
            }
        }

        // Intermediate parameter set produced by Init() and consumed by InitSegments().
        struct DirectParams
        {
            internal float x0; // toe/linear junction
            internal float y0;
            internal float x1; // linear/shoulder junction
            internal float y1;
            internal float W;  // white point (end of curve)

            internal float overshootX;
            internal float overshootY;

            internal float gamma;
        }

        public float whitePoint { get; private set; }
        public float inverseWhitePoint { get; private set; }
        public float x0 { get; private set; }
        public float x1 { get; private set; }

        // [0] = toe, [1] = linear mid-section, [2] = shoulder
        public readonly Segment[] segments = new Segment[3];

        public HableCurve()
        {
            for (int i = 0; i < 3; i++)
                segments[i] = new Segment();

            uniforms = new Uniforms(this);
        }

        // Evaluates the full curve at x (linear input, unnormalized).
        public float Eval(float x)
        {
            float normX = x * inverseWhitePoint;
            int index = (normX < x0) ? 0 : ((normX < x1) ? 1 : 2);
            var segment = segments[index];
            float ret = segment.Eval(normX);
            return ret;
        }

        // Rebuilds the three segments from the artist-facing parameters.
        // Must be called before Eval(); all inputs are clamped to sane ranges below.
        public void Init(float toeStrength, float toeLength, float shoulderStrength, float shoulderLength, float shoulderAngle, float gamma)
        {
            var dstParams = new DirectParams();

            // This is not actually the display gamma. It's just a UI space to avoid having to
            // enter small numbers for the input.
            const float kPerceptualGamma = 2.2f;

            // Constraints
            {
                toeLength = Mathf.Pow(Mathf.Clamp01(toeLength), kPerceptualGamma);
                toeStrength = Mathf.Clamp01(toeStrength);
                shoulderAngle = Mathf.Clamp01(shoulderAngle);
                shoulderStrength = Mathf.Clamp(shoulderStrength, 1e-5f, 1f - 1e-5f);
                shoulderLength = Mathf.Max(0f, shoulderLength);
                gamma = Mathf.Max(1e-5f, gamma);
            }

            // Apply base params
            {
                // Toe goes from 0 to 0.5
                float x0 = toeLength * 0.5f;
                float y0 = (1f - toeStrength) * x0; // Lerp from 0 to x0

                float remainingY = 1f - y0;

                float initialW = x0 + remainingY;

                float y1_offset = (1f - shoulderStrength) * remainingY;
                float x1 = x0 + y1_offset;
                float y1 = y0 + y1_offset;

                // Filmic shoulder strength is in F stops
                float extraW = RuntimeUtilities.Exp2(shoulderLength) - 1f;

                float W = initialW + extraW;

                dstParams.x0 = x0;
                dstParams.y0 = y0;
                dstParams.x1 = x1;
                dstParams.y1 = y1;
                dstParams.W = W;

                // Bake the linear to gamma space conversion
                dstParams.gamma = gamma;
            }

            dstParams.overshootX = (dstParams.W * 2f) * shoulderAngle * shoulderLength;
            dstParams.overshootY = 0.5f * shoulderAngle * shoulderLength;

            InitSegments(dstParams);
        }

        // Fits the three segments from the direct parameters. Statement order matters:
        // the mid segment and the junction slopes must be computed before gamma is applied
        // to the endpoints, and the final normalization depends on the fitted shoulder.
        void InitSegments(DirectParams srcParams)
        {
            var paramsCopy = srcParams;

            whitePoint = srcParams.W;
            inverseWhitePoint = 1f / srcParams.W;

            // normalize params to 1.0 range
            paramsCopy.W = 1f;
            paramsCopy.x0 /= srcParams.W;
            paramsCopy.x1 /= srcParams.W;
            paramsCopy.overshootX = srcParams.overshootX / srcParams.W;

            float toeM = 0f;
            float shoulderM = 0f;
            {
                float m, b;
                AsSlopeIntercept(out m, out b, paramsCopy.x0, paramsCopy.x1, paramsCopy.y0, paramsCopy.y1);

                float g = srcParams.gamma;

                // Base function of linear section plus gamma is
                // y = (mx+b)^g
                //
                // which we can rewrite as
                // y = exp(g*ln(m) + g*ln(x+b/m))
                //
                // and our evaluation function is (skipping the if parts):
                /*
                    float x0 = (x - offsetX) * scaleX;
                    y0 = exp(m_lnA + m_B*log(x0));
                    return y0*scaleY + m_offsetY;
                */

                var midSegment = segments[1];
                midSegment.offsetX = -(b / m);
                midSegment.offsetY = 0f;
                midSegment.scaleX = 1f;
                midSegment.scaleY = 1f;
                midSegment.lnA = g * Mathf.Log(m);
                midSegment.B = g;

                // Slopes at the junctions, used to fit the toe & shoulder below
                toeM = EvalDerivativeLinearGamma(m, b, g, paramsCopy.x0);
                shoulderM = EvalDerivativeLinearGamma(m, b, g, paramsCopy.x1);

                // apply gamma to endpoints
                paramsCopy.y0 = Mathf.Max(1e-5f, Mathf.Pow(paramsCopy.y0, paramsCopy.gamma));
                paramsCopy.y1 = Mathf.Max(1e-5f, Mathf.Pow(paramsCopy.y1, paramsCopy.gamma));

                paramsCopy.overshootY = Mathf.Pow(1f + paramsCopy.overshootY, paramsCopy.gamma) - 1f;
            }

            this.x0 = paramsCopy.x0;
            this.x1 = paramsCopy.x1;

            // Toe section
            {
                var toeSegment = segments[0];
                toeSegment.offsetX = 0;
                toeSegment.offsetY = 0f;
                toeSegment.scaleX = 1f;
                toeSegment.scaleY = 1f;

                float lnA, B;
                SolveAB(out lnA, out B, paramsCopy.x0, paramsCopy.y0, toeM);
                toeSegment.lnA = lnA;
                toeSegment.B = B;
            }

            // Shoulder section
            {
                // Use the simple version that is usually too flat
                // (shoulder is the toe's power function mirrored through the overshoot point)
                var shoulderSegment = segments[2];

                float x0 = (1f + paramsCopy.overshootX) - paramsCopy.x1;
                float y0 = (1f + paramsCopy.overshootY) - paramsCopy.y1;

                float lnA, B;
                SolveAB(out lnA, out B, x0, y0, shoulderM);

                shoulderSegment.offsetX = (1f + paramsCopy.overshootX);
                shoulderSegment.offsetY = (1f + paramsCopy.overshootY);

                shoulderSegment.scaleX = -1f;
                shoulderSegment.scaleY = -1f;
                shoulderSegment.lnA = lnA;
                shoulderSegment.B = B;
            }

            // Normalize so that we hit 1.0 at our white point. We wouldn't have do this if we
            // skipped the overshoot part.
            {
                // Evaluate shoulder at the end of the curve
                float scale = segments[2].Eval(1f);
                float invScale = 1f / scale;

                segments[0].offsetY *= invScale;
                segments[0].scaleY *= invScale;

                segments[1].offsetY *= invScale;
                segments[1].scaleY *= invScale;

                segments[2].offsetY *= invScale;
                segments[2].scaleY *= invScale;
            }
        }

        // Find a function of the form:
        //   f(x) = e^(lnA + Bln(x))
        // where
        //   f(0)   = 0; not really a constraint
        //   f(x0)  = y0
        //   f'(x0) = m
        void SolveAB(out float lnA, out float B, float x0, float y0, float m)
        {
            B = (m * x0) / y0;
            lnA = Mathf.Log(y0) - B * Mathf.Log(x0);
        }

        // Convert to y=mx+b
        // (slope defaults to 1 when the two x's coincide to avoid a division by zero)
        void AsSlopeIntercept(out float m, out float b, float x0, float x1, float y0, float y1)
        {
            float dy = (y1 - y0);
            float dx = (x1 - x0);

            if (dx == 0)
                m = 1f;
            else
                m = dy / dx;

            b = y0 - x0 * m;
        }

        // f(x) = (mx+b)^g
        // f'(x) = gm(mx+b)^(g-1)
        float EvalDerivativeLinearGamma(float m, float b, float g, float x)
        {
            float ret = g * m * Mathf.Pow(m * x + b, g - 1f);
            return ret;
        }

        //
        // Uniform building for ease of use
        // (packs the fitted segment coefficients into Vector4s for shader upload)
        //
        public class Uniforms
        {
            HableCurve parent;

            internal Uniforms(HableCurve parent)
            {
                this.parent = parent;
            }

            public Vector4 curve
            {
                get { return new Vector4(parent.inverseWhitePoint, parent.x0, parent.x1, 0f); }
            }

            public Vector4 toeSegmentA
            {
                get
                {
                    var toe = parent.segments[0];
                    return new Vector4(toe.offsetX, toe.offsetY, toe.scaleX, toe.scaleY);
                }
            }

            public Vector4 toeSegmentB
            {
                get
                {
                    var toe = parent.segments[0];
                    return new Vector4(toe.lnA, toe.B, 0f, 0f);
                }
            }

            public Vector4 midSegmentA
            {
                get
                {
                    var mid = parent.segments[1];
                    return new Vector4(mid.offsetX, mid.offsetY, mid.scaleX, mid.scaleY);
                }
            }

            public Vector4 midSegmentB
            {
                get
                {
                    var mid = parent.segments[1];
                    return new Vector4(mid.lnA, mid.B, 0f, 0f);
                }
            }

            public Vector4 shoSegmentA
            {
                get
                {
                    var sho = parent.segments[2];
                    return new Vector4(sho.offsetX, sho.offsetY, sho.scaleX, sho.scaleY);
                }
            }

            public Vector4 shoSegmentB
            {
                get
                {
                    var sho = parent.segments[2];
                    return new Vector4(sho.lnA, sho.B, 0f, 0f);
                }
            }
        }

        public readonly Uniforms uniforms;
    }
}
337
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Halton low-discrepancy sequence (radical inverse), used e.g. for jitter offsets.
    public static class HaltonSeq
    {
        // Returns the `index`-th element of the Halton sequence in base `radix`.
        // Index 0 yields 0; results lie in [0, 1).
        public static float Get(int index, int radix)
        {
            float value = 0f;
            float digitWeight = 1f / (float)radix;

            // Peel off base-`radix` digits, mirroring them around the radix point.
            for (int n = index; n > 0; n /= radix)
            {
                value += (float)(n % radix) * digitWeight;
                digitWeight /= (float)radix;
            }

            return value;
        }
    }
}
22
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Builds a 128-bin log-luminance histogram of the current frame on the GPU,
    // used by the auto-exposure effect. The result lives in `data` (uint per bin).
    public sealed class LogHistogram
    {
        public const int rangeMin = -9; // ev
        public const int rangeMax = 9; // ev

        // Don't forget to update 'ExposureHistogram.hlsl' if you change these values !
        const int k_Bins = 128;

        // Compute thread group sizes; chosen lazily in Generate() (see Android note there)
        int m_ThreadX;
        int m_ThreadY;

        // GPU-side histogram buffer (k_Bins uints). Null until the first Generate() call.
        public ComputeBuffer data { get; private set; }

        // Records the clear + gather dispatches into the context's command buffer.
        // Recording order matters: clear must run before the gather each frame.
        public void Generate(PostProcessRenderContext context)
        {
            if (data == null)
            {
                m_ThreadX = 16;
                // Smaller Y group size on Android GL — presumably a driver/perf workaround;
                // see RuntimeUtilities.isAndroidOpenGL
                m_ThreadY = RuntimeUtilities.isAndroidOpenGL ? 8 : 16;
                data = new ComputeBuffer (k_Bins, sizeof(uint));
            }

            var scaleOffsetRes = GetHistogramScaleOffsetRes(context);
            var compute = context.resources.computeShaders.exposureHistogram;
            var cmd = context.command;
            cmd.BeginSample("LogHistogram");

            // Clear the buffer on every frame as we use it to accumulate luminance values on each frame
            int kernel = compute.FindKernel("KEyeHistogramClear");
            cmd.SetComputeBufferParam(compute, kernel, "_HistogramBuffer", data);
            cmd.DispatchCompute(compute, kernel, Mathf.CeilToInt(k_Bins / (float)m_ThreadX), 1, 1);

            // Get a log histogram
            kernel = compute.FindKernel("KEyeHistogram");
            cmd.SetComputeBufferParam(compute, kernel, "_HistogramBuffer", data);
            cmd.SetComputeTextureParam(compute, kernel, "_Source", context.source);
            cmd.SetComputeVectorParam(compute, "_ScaleOffsetRes", scaleOffsetRes);
            // One thread per pixel, rounded up to whole thread groups
            cmd.DispatchCompute(compute, kernel,
                Mathf.CeilToInt(scaleOffsetRes.z / (float)m_ThreadX),
                Mathf.CeilToInt(scaleOffsetRes.w / (float)m_ThreadY),
                1
            );

            cmd.EndSample("LogHistogram");
        }

        // Returns (scale, offset, width, height): scale/offset remap log-luminance in
        // [rangeMin;rangeMax] EV to [0;1]; zw carry the source resolution for dispatch.
        public Vector4 GetHistogramScaleOffsetRes(PostProcessRenderContext context)
        {
            float diff = rangeMax - rangeMin;
            float scale = 1f / diff;
            float offset = -rangeMin * scale;
            return new Vector4(scale, offset, context.width, context.height);
        }

        // Releases the GPU buffer; safe to call multiple times.
        public void Release()
        {
            if (data != null)
                data.Release();

            data = null;
        }
    }
}
65
orrb
openai
C#
using System;
using System.Collections.Generic;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    // Helpers to retrieve a renderable Mesh for a given collider or primitive type,
    // with caching so built-in primitive meshes are only fetched once.
    static class MeshUtilities
    {
        // Cache of built-in primitive meshes, filled lazily by GetPrimitive()
        static Dictionary<PrimitiveType, Mesh> s_Primitives;

        // Maps supported collider types to the primitive whose mesh approximates them
        static Dictionary<Type, PrimitiveType> s_ColliderPrimitives;

        static MeshUtilities()
        {
            s_Primitives = new Dictionary<PrimitiveType, Mesh>();
            s_ColliderPrimitives = new Dictionary<Type, PrimitiveType>
            {
                { typeof(BoxCollider), PrimitiveType.Cube },
                { typeof(SphereCollider), PrimitiveType.Sphere },
                { typeof(CapsuleCollider), PrimitiveType.Capsule }
            };
        }

        // Returns a mesh matching the collider's shape. MeshColliders return their own
        // shared mesh; box/sphere/capsule colliders map to built-in primitives.
        internal static Mesh GetColliderMesh(Collider collider)
        {
            var type = collider.GetType();

            if (type == typeof(MeshCollider))
                return ((MeshCollider)collider).sharedMesh;

            // Single TryGetValue instead of the previous ContainsKey + indexer
            // (one dictionary lookup instead of two, and an explicit error when
            // asserts are stripped from release builds).
            PrimitiveType primitiveType;
            if (!s_ColliderPrimitives.TryGetValue(type, out primitiveType))
            {
                Assert.IsTrue(false, "Unknown collider");
                // Preserve the exception type the old indexer would have thrown
                throw new KeyNotFoundException("Unknown collider type: " + type);
            }

            return GetPrimitive(primitiveType);
        }

        // Returns (and caches) the mesh for a built-in primitive type.
        internal static Mesh GetPrimitive(PrimitiveType primitiveType)
        {
            Mesh mesh;

            if (!s_Primitives.TryGetValue(primitiveType, out mesh))
            {
                mesh = GetBuiltinMesh(primitiveType);
                s_Primitives.Add(primitiveType, mesh);
            }

            return mesh;
        }

        // (Not pretty) hack to get meshes from `unity default resources` in user land
        // What it does is create a new GameObject using the CreatePrimitive utility, retrieve its
        // mesh and discard it...
        static Mesh GetBuiltinMesh(PrimitiveType primitiveType)
        {
            var gameObject = GameObject.CreatePrimitive(primitiveType);
            var mesh = gameObject.GetComponent<MeshFilter>().sharedMesh;
            RuntimeUtilities.Destroy(gameObject);
            return mesh;
        }
    }
}
59
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Pairs a material with a per-draw MaterialPropertyBlock so effects can set
    // shader values without mutating shared material state.
    public sealed class PropertySheet
    {
        // Per-draw shader properties, applied on top of the material at draw time
        public MaterialPropertyBlock properties { get; private set; }

        // The wrapped material; owned by this sheet and destroyed in Release()
        internal Material material { get; private set; }

        internal PropertySheet(Material material)
        {
            properties = new MaterialPropertyBlock();
            this.material = material;
        }

        // Removes every shader keyword currently enabled on the material.
        public void ClearKeywords()
        {
            material.shaderKeywords = null;
        }

        public void EnableKeyword(string keyword)
        {
            material.EnableKeyword(keyword);
        }

        public void DisableKeyword(string keyword)
        {
            material.DisableKeyword(keyword);
        }

        // Destroys the wrapped material; the sheet must not be used afterwards.
        internal void Release()
        {
            RuntimeUtilities.Destroy(material);
            material = null;
        }
    }
}
36
orrb
openai
C#
using System;
using System.Collections.Generic;

namespace UnityEngine.Rendering.PostProcessing
{
    // Creates and caches one PropertySheet (material + property block) per shader.
    public sealed class PropertySheetFactory
    {
        readonly Dictionary<Shader, PropertySheet> m_Sheets;

        public PropertySheetFactory()
        {
            m_Sheets = new Dictionary<Shader, PropertySheet>();
        }

        // Convenience lookup by shader name; throws ArgumentException if not found.
        public PropertySheet Get(string shaderName)
        {
            return Get(Shader.Find(shaderName));
        }

        // Returns the cached sheet for the shader, creating it on first use.
        // Throws ArgumentException when the shader is null (e.g. Shader.Find failed).
        public PropertySheet Get(Shader shader)
        {
            // BUGFIX: validate BEFORE the dictionary lookup. Dictionary.TryGetValue
            // throws ArgumentNullException on a null key, which previously masked
            // this intended error path entirely.
            if (shader == null)
                throw new ArgumentException(string.Format("Invalid shader ({0})", shader));

            PropertySheet sheet;

            if (m_Sheets.TryGetValue(shader, out sheet))
                return sheet;

            var shaderName = shader.name;
            var material = new Material(shader)
            {
                // Keep only the last path component of the shader name for readability
                name = string.Format("PostProcess - {0}", shaderName.Substring(shaderName.LastIndexOf('/') + 1)),
                hideFlags = HideFlags.DontSave
            };

            sheet = new PropertySheet(material);
            m_Sheets.Add(shader, sheet);
            return sheet;
        }

        // Destroys every cached sheet's material and empties the cache.
        public void Release()
        {
            foreach (var sheet in m_Sheets.Values)
                sheet.Release();

            m_Sheets.Clear();
        }
    }
}
51
orrb
openai
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Text;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    using SceneManagement;
    using UnityObject = UnityEngine.Object;

    // Grab-bag of runtime helpers shared across the post-processing stack:
    // lazily-created utility textures/meshes/materials, fullscreen-triangle blits,
    // platform/VR queries, jittered projection matrices and reflection utilities.
    public static class RuntimeUtilities
    {
        #region Textures

        // 1x1 solid white texture, created on first access and cached
        static Texture2D m_WhiteTexture;
        public static Texture2D whiteTexture
        {
            get
            {
                if (m_WhiteTexture == null)
                {
                    m_WhiteTexture = new Texture2D(1, 1, TextureFormat.ARGB32, false);
                    m_WhiteTexture.SetPixel(0, 0, Color.white);
                    m_WhiteTexture.Apply();
                }

                return m_WhiteTexture;
            }
        }

        // 1x1 solid black texture, created on first access and cached
        static Texture2D m_BlackTexture;
        public static Texture2D blackTexture
        {
            get
            {
                if (m_BlackTexture == null)
                {
                    m_BlackTexture = new Texture2D(1, 1, TextureFormat.ARGB32, false);
                    m_BlackTexture.SetPixel(0, 0, Color.black);
                    m_BlackTexture.Apply();
                }

                return m_BlackTexture;
            }
        }

        // 1x1 fully-transparent texture, created on first access and cached
        static Texture2D m_TransparentTexture;
        public static Texture2D transparentTexture
        {
            get
            {
                if (m_TransparentTexture == null)
                {
                    m_TransparentTexture = new Texture2D(1, 1, TextureFormat.ARGB32, false);
                    m_TransparentTexture.SetPixel(0, 0, Color.clear);
                    m_TransparentTexture.Apply();
                }

                return m_TransparentTexture;
            }
        }

        #endregion

        #region Rendering

        // Single oversized triangle covering the whole screen (cheaper than a quad)
        static Mesh s_FullscreenTriangle;
        public static Mesh fullscreenTriangle
        {
            get
            {
                if (s_FullscreenTriangle != null)
                    return s_FullscreenTriangle;

                s_FullscreenTriangle = new Mesh { name = "Fullscreen Triangle" };

                // Because we have to support older platforms (GLES2/3, DX9 etc) we can't do all of
                // this directly in the vertex shader using vertex ids :(
                s_FullscreenTriangle.SetVertices(new List<Vector3>
                {
                    new Vector3(-1f, -1f, 0f),
                    new Vector3(-1f,  3f, 0f),
                    new Vector3( 3f, -1f, 0f)
                });
                s_FullscreenTriangle.SetIndices(new [] { 0, 1, 2 }, MeshTopology.Triangles, 0, false);
                s_FullscreenTriangle.UploadMeshData(false);

                return s_FullscreenTriangle;
            }
        }

        // Lazily-created copy material for the "standard" copy path
        static Material s_CopyStdMaterial;
        public static Material copyStdMaterial
        {
            get
            {
                if (s_CopyStdMaterial != null)
                    return s_CopyStdMaterial;

                var shader = Shader.Find("Hidden/PostProcessing/CopyStd");
                s_CopyStdMaterial = new Material(shader)
                {
                    name = "PostProcess - CopyStd",
                    hideFlags = HideFlags.HideAndDontSave
                };

                return s_CopyStdMaterial;
            }
        }

        // Lazily-created plain copy material used by the blit helpers below
        static Material s_CopyMaterial;
        public static Material copyMaterial
        {
            get
            {
                if (s_CopyMaterial != null)
                    return s_CopyMaterial;

                var shader = Shader.Find("Hidden/PostProcessing/Copy");
                s_CopyMaterial = new Material(shader)
                {
                    name = "PostProcess - Copy",
                    hideFlags = HideFlags.HideAndDontSave
                };

                return s_CopyMaterial;
            }
        }

        // PropertySheet wrapping copyMaterial, for callers that need a property block
        static PropertySheet s_CopySheet;
        public static PropertySheet copySheet
        {
            get
            {
                if (s_CopySheet == null)
                    s_CopySheet = new PropertySheet(copyMaterial);

                return s_CopySheet;
            }
        }

        // Use a custom blit method to draw a fullscreen triangle instead of a fullscreen quad
        // https://michaldrobot.com/2014/04/01/gcn-execution-patterns-in-full-screen-passes/
        public static void BlitFullscreenTriangle(this CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, bool clear = false)
        {
            cmd.SetGlobalTexture(ShaderIDs.MainTex, source);
            cmd.SetRenderTarget(destination);

            if (clear)
                cmd.ClearRenderTarget(true, true, Color.clear);

            cmd.DrawMesh(fullscreenTriangle, Matrix4x4.identity, copyMaterial, 0, 0);
        }

        // Same as above but draws with the given property sheet's material & pass
        public static void BlitFullscreenTriangle(this CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, PropertySheet propertySheet, int pass, bool clear = false)
        {
            cmd.SetGlobalTexture(ShaderIDs.MainTex, source);
            cmd.SetRenderTarget(destination);

            if (clear)
                cmd.ClearRenderTarget(true, true, Color.clear);

            cmd.DrawMesh(fullscreenTriangle, Matrix4x4.identity, propertySheet.material, 0, pass, propertySheet.properties);
        }

        // Variant binding an explicit depth target alongside the color destination
        public static void BlitFullscreenTriangle(this CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination, RenderTargetIdentifier depth, PropertySheet propertySheet, int pass, bool clear = false)
        {
            cmd.SetGlobalTexture(ShaderIDs.MainTex, source);
            cmd.SetRenderTarget(destination, depth);

            if (clear)
                cmd.ClearRenderTarget(true, true, Color.clear);

            cmd.DrawMesh(fullscreenTriangle, Matrix4x4.identity, propertySheet.material, 0, pass, propertySheet.properties);
        }

        // MRT variant: blits into several color destinations plus a depth target
        public static void BlitFullscreenTriangle(this CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier[] destinations, RenderTargetIdentifier depth, PropertySheet propertySheet, int pass, bool clear = false)
        {
            cmd.SetGlobalTexture(ShaderIDs.MainTex, source);
            cmd.SetRenderTarget(destinations, depth);

            if (clear)
                cmd.ClearRenderTarget(true, true, Color.clear);

            cmd.DrawMesh(fullscreenTriangle, Matrix4x4.identity, propertySheet.material, 0, pass, propertySheet.properties);
        }

        // Immediate-mode blit (no command buffer); restores the previous render target
        public static void BlitFullscreenTriangle(Texture source, RenderTexture destination, Material material, int pass)
        {
            var oldRt = RenderTexture.active;

            material.SetPass(pass);
            if (source != null)
                material.SetTexture(ShaderIDs.MainTex, source);

            Graphics.SetRenderTarget(destination);
            Graphics.DrawMeshNow(fullscreenTriangle, Matrix4x4.identity);
            RenderTexture.active = oldRt;
        }

        // Fast basic copy texture if available, falls back to blit copy if not
        // Assumes that both textures have the exact same type and format
        public static void CopyTexture(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier destination)
        {
            if (SystemInfo.copyTextureSupport > CopyTextureSupport.None)
            {
                cmd.CopyTexture(source, destination);
                return;
            }

            cmd.BlitFullscreenTriangle(source, destination);
        }

        // TODO: Generalize the GetTemporaryRT and Blit commands in order to support
        // RT Arrays for Stereo Instancing/MultiView

        #endregion

        #region Unity specifics & misc methods

        public static bool scriptableRenderPipelineActive
        {
            get { return GraphicsSettings.renderPipelineAsset != null; } // 5.6+ only
        }

#if UNITY_EDITOR
        // Editor-only: true when VR is on and single-pass stereo is the selected path
        public static bool isSinglePassStereoSelected
        {
            get
            {
                return UnityEditor.PlayerSettings.virtualRealitySupported
                    && UnityEditor.PlayerSettings.stereoRenderingPath == UnityEditor.StereoRenderingPath.SinglePass;
            }
        }
#endif

        // TODO: Check for SPSR support at runtime
        public static bool isSinglePassStereoEnabled
        {
            get
            {
#if UNITY_EDITOR
                return isSinglePassStereoSelected && Application.isPlaying;
#elif UNITY_2017_2_OR_NEWER
                return UnityEngine.XR.XRSettings.eyeTextureDesc.vrUsage == VRTextureUsage.TwoEyes;
#else
                return false;
#endif
            }
        }

        // NOTE(review): no #else fallback — this getter does not compile on player
        // targets older than 5.6 (other than Xbox One); confirm supported targets.
        public static bool isVREnabled
        {
            get
            {
#if UNITY_EDITOR
                return UnityEditor.PlayerSettings.virtualRealitySupported;
#elif UNITY_XBOXONE
                return false;
#elif UNITY_2017_2_OR_NEWER
                return UnityEngine.XR.XRSettings.enabled;
#elif UNITY_5_6_OR_NEWER
                return UnityEngine.VR.VRSettings.enabled;
#endif
            }
        }

        // True on Android when running a GL backend (i.e. anything but Vulkan)
        public static bool isAndroidOpenGL
        {
            get { return Application.platform == RuntimePlatform.Android && SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan; }
        }

        // Destroys a Unity object with the method appropriate for the current mode
        // (DestroyImmediate in edit mode, deferred Destroy while playing)
        public static void Destroy(UnityObject obj)
        {
            if (obj != null)
            {
#if UNITY_EDITOR
                if (Application.isPlaying)
                    UnityObject.Destroy(obj);
                else
                    UnityObject.DestroyImmediate(obj);
#else
                UnityObject.Destroy(obj);
#endif
            }
        }

        public static bool isLinearColorSpace
        {
            get { return QualitySettings.activeColorSpace == ColorSpace.Linear; }
        }

        public static bool IsResolvedDepthAvailable(Camera camera)
        {
            // AFAIK resolved depth is only available on D3D11/12 via BuiltinRenderTextureType.ResolvedDepth
            // TODO: Is there more proper way to determine this? What about SRPs?
            var gtype = SystemInfo.graphicsDeviceType;
            return camera.actualRenderingPath == RenderingPath.DeferredShading &&
                (gtype == GraphicsDeviceType.Direct3D11 || gtype == GraphicsDeviceType.Direct3D12 || gtype == GraphicsDeviceType.XboxOne);
        }

        // Destroys a profile and, optionally, every effect settings object it holds
        public static void DestroyProfile(PostProcessProfile profile, bool destroyEffects)
        {
            if (destroyEffects)
            {
                foreach (var effect in profile.settings)
                    Destroy(effect);
            }

            Destroy(profile);
        }

        // Destroys a volume and, optionally, its shared profile (and that profile's effects)
        public static void DestroyVolume(PostProcessVolume volume, bool destroySharedProfile)
        {
            if (destroySharedProfile)
                DestroyProfile(volume.sharedProfile, true);

            Destroy(volume);
        }

        // Returns ALL scene objects in the hierarchy, included inactive objects
        // Beware, this method will be slow for big scenes
        // (breadth-first walk over the active scene's transform hierarchy)
        public static IEnumerable<T> GetAllSceneObjects<T>()
            where T : Component
        {
            var queue = new Queue<Transform>();
            var roots = SceneManager.GetActiveScene().GetRootGameObjects();

            foreach (var root in roots)
            {
                queue.Enqueue(root.transform);
                var comp = root.GetComponent<T>();

                if (comp != null)
                    yield return comp;
            }

            while (queue.Count > 0)
            {
                foreach (Transform child in queue.Dequeue())
                {
                    queue.Enqueue(child);
                    var comp = child.GetComponent<T>();

                    if (comp != null)
                        yield return comp;
                }
            }
        }

        // Lazily allocates the object if the reference is still null
        public static void CreateIfNull<T>(ref T obj)
            where T : class, new()
        {
            if (obj == null)
                obj = new T();
        }

        #endregion

        #region Maths

        // 2^x via exp(x * ln(2))
        public static float Exp2(float x)
        {
            return Mathf.Exp(x * 0.69314718055994530941723212145818f);
        }

        // Adapted heavily from PlayDead's TAA code
        // https://github.com/playdeadgames/temporal/blob/master/Assets/Scripts/Extensions.cs
        // Builds a perspective projection whose frustum is shifted by `offset` pixels
        // (sub-pixel jitter for temporal anti-aliasing).
        public static Matrix4x4 GetJitteredPerspectiveProjectionMatrix(Camera camera, Vector2 offset)
        {
            float vertical = Mathf.Tan(0.5f * Mathf.Deg2Rad * camera.fieldOfView);
            float horizontal = vertical * camera.aspect;
            float near = camera.nearClipPlane;
            float far = camera.farClipPlane;

            // Convert the pixel-space offset into view-space frustum units
            offset.x *= horizontal / (0.5f * camera.pixelWidth);
            offset.y *= vertical / (0.5f * camera.pixelHeight);

            float left = (offset.x - horizontal) * near;
            float right = (offset.x + horizontal) * near;
            float top = (offset.y + vertical) * near;
            float bottom = (offset.y - vertical) * near;

            // Standard off-center perspective frustum matrix
            var matrix = new Matrix4x4();

            matrix[0, 0] = (2f * near) / (right - left);
            matrix[0, 1] = 0f;
            matrix[0, 2] = (right + left) / (right - left);
            matrix[0, 3] = 0f;

            matrix[1, 0] = 0f;
            matrix[1, 1] = (2f * near) / (top - bottom);
            matrix[1, 2] = (top + bottom) / (top - bottom);
            matrix[1, 3] = 0f;

            matrix[2, 0] = 0f;
            matrix[2, 1] = 0f;
            matrix[2, 2] = -(far + near) / (far - near);
            matrix[2, 3] = -(2f * far * near) / (far - near);

            matrix[3, 0] = 0f;
            matrix[3, 1] = 0f;
            matrix[3, 2] = -1f;
            matrix[3, 3] = 0f;

            return matrix;
        }

        // Orthographic counterpart of the jittered projection above
        public static Matrix4x4 GetJitteredOrthographicProjectionMatrix(Camera camera, Vector2 offset)
        {
            float vertical = camera.orthographicSize;
            float horizontal = vertical * camera.aspect;

            offset.x *= horizontal / (0.5f * camera.pixelWidth);
            offset.y *= vertical / (0.5f * camera.pixelHeight);

            float left = offset.x - horizontal;
            float right = offset.x + horizontal;
            float top = offset.y + vertical;
            float bottom = offset.y - vertical;

            return Matrix4x4.Ortho(left, right, bottom, top, camera.nearClipPlane, camera.farClipPlane);
        }

        // Applies a sub-pixel jitter to an arbitrary projection matrix (used when the
        // camera's matrix was overridden). Pre-2017.2 path reconstructs the frustum
        // planes from the matrix itself.
        public static Matrix4x4 GenerateJitteredProjectionMatrixFromOriginal(PostProcessRenderContext context, Matrix4x4 origProj, Vector2 jitter)
        {
#if UNITY_2017_2_OR_NEWER
            var planes = origProj.decomposeProjection;

            float vertFov = Math.Abs(planes.top) + Math.Abs(planes.bottom);
            float horizFov = Math.Abs(planes.left) + Math.Abs(planes.right);

            var planeJitter = new Vector2(jitter.x * horizFov / context.screenWidth,
                jitter.y * vertFov / context.screenHeight);

            planes.left += planeJitter.x;
            planes.right += planeJitter.x;
            planes.top += planeJitter.y;
            planes.bottom += planeJitter.y;

            var jitteredMatrix = Matrix4x4.Frustum(planes);

            return jitteredMatrix;
#else
            // Recover the frustum tangents from the projection matrix terms
            var rTan = (1.0f + origProj[0, 2]) / origProj[0, 0];
            var lTan = (-1.0f + origProj[0, 2]) / origProj[0, 0];
            var tTan = (1.0f + origProj[1, 2]) / origProj[1, 1];
            var bTan = (-1.0f + origProj[1, 2]) / origProj[1, 1];

            float tanVertFov = Math.Abs(tTan) + Math.Abs(bTan);
            float tanHorizFov = Math.Abs(lTan) + Math.Abs(rTan);

            jitter.x *= tanHorizFov / context.screenWidth;
            jitter.y *= tanVertFov / context.screenHeight;

            float left = jitter.x + lTan;
            float right = jitter.x + rTan;
            float top = jitter.y + tTan;
            float bottom = jitter.y + bTan;

            var jitteredMatrix = new Matrix4x4();

            jitteredMatrix[0, 0] = 2f / (right - left);
            jitteredMatrix[0, 1] = 0f;
            jitteredMatrix[0, 2] = (right + left) / (right - left);
            jitteredMatrix[0, 3] = 0f;

            jitteredMatrix[1, 0] = 0f;
            jitteredMatrix[1, 1] = 2f / (top - bottom);
            jitteredMatrix[1, 2] = (top + bottom) / (top - bottom);
            jitteredMatrix[1, 3] = 0f;

            jitteredMatrix[2, 0] = 0f;
            jitteredMatrix[2, 1] = 0f;
            jitteredMatrix[2, 2] = origProj[2, 2];
            jitteredMatrix[2, 3] = origProj[2, 3];

            jitteredMatrix[3, 0] = 0f;
            jitteredMatrix[3, 1] = 0f;
            jitteredMatrix[3, 2] = -1f;
            jitteredMatrix[3, 3] = 0f;

            return jitteredMatrix;
#endif
        }

        #endregion

        #region Reflection

        // Enumerates every loadable type in every loaded assembly
        public static IEnumerable<Type> GetAllAssemblyTypes()
        {
            return AppDomain.CurrentDomain.GetAssemblies()
                .SelectMany(t =>
                {
                    // Ugly hack to handle mis-versioned dlls
                    var innerTypes = new Type[0];
                    try
                    {
                        innerTypes = t.GetTypes();
                    }
                    catch {}
                    return innerTypes;
                });
        }

        // Quick extension method to get the first attribute of type T on a given Type
        public static T GetAttribute<T>(this Type type) where T : Attribute
        {
            Assert.IsTrue(type.IsDefined(typeof(T), false), "Attribute not found");
            return (T)type.GetCustomAttributes(typeof(T), false)[0];
        }

        // Returns all attributes set on a specific member
        // Note: doesn't include inherited attributes, only explicit ones
        public static Attribute[] GetMemberAttributes<TType, TValue>(Expression<Func<TType, TValue>> expr)
        {
            Expression body = expr;

            if (body is LambdaExpression)
                body = ((LambdaExpression)body).Body;

            switch (body.NodeType)
            {
                case ExpressionType.MemberAccess:
                    var fi = (FieldInfo)((MemberExpression)body).Member;
                    return fi.GetCustomAttributes(false).Cast<Attribute>().ToArray();
                default:
                    throw new InvalidOperationException();
            }
        }

        // Returns a string path from an expression - mostly used to retrieve serialized properties
        // without hardcoding the field path. Safer, and allows for proper refactoring.
        public static string GetFieldPath<TType, TValue>(Expression<Func<TType, TValue>> expr)
        {
            MemberExpression me;
            switch (expr.Body.NodeType)
            {
                case ExpressionType.MemberAccess:
                    me = expr.Body as MemberExpression;
                    break;
                default:
                    throw new InvalidOperationException();
            }

            // Walk up the member chain, then rebuild it outermost-first
            var members = new List<string>();
            while (me != null)
            {
                members.Add(me.Member.Name);
                me = me.Expression as MemberExpression;
            }

            var sb = new StringBuilder();
            for (int i = members.Count - 1; i >= 0; i--)
            {
                sb.Append(members[i]);
                if (i > 0) sb.Append('.');
            }

            return sb.ToString();
        }

        // Resolves the object owning the last field in a dotted field path
        // (recursively walks the path via reflection; fields only, not properties)
        public static object GetParentObject(string path, object obj)
        {
            var fields = path.Split('.');

            if (fields.Length == 1)
                return obj;

            var info = obj.GetType().GetField(fields[0], BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
            obj = info.GetValue(obj);

            return GetParentObject(string.Join(".", fields, 1, fields.Length - 1), obj);
        }

        #endregion
    }
}
584
orrb
openai
C#
namespace UnityEngine.Rendering.PostProcessing
{
    // Pre-hashed shader ids - naming conventions are a bit off in this file as we use the same
    // fields names as in the shaders for ease of use... Would be nice to clean this up at some
    // point.
    //
    // Each field caches the integer id returned by Shader.PropertyToID() for the shader
    // property of the same name, so effects can set material/compute properties without
    // re-hashing the string on every use.
    static class ShaderIDs
    {
        // Common
        internal static readonly int MainTex = Shader.PropertyToID("_MainTex");

        // Temporal anti-aliasing
        internal static readonly int Jitter = Shader.PropertyToID("_Jitter");
        internal static readonly int Sharpness = Shader.PropertyToID("_Sharpness");
        internal static readonly int FinalBlendParameters = Shader.PropertyToID("_FinalBlendParameters");
        internal static readonly int HistoryTex = Shader.PropertyToID("_HistoryTex");

        // SMAA
        internal static readonly int SMAA_Flip = Shader.PropertyToID("_SMAA_Flip");
        internal static readonly int SMAA_Flop = Shader.PropertyToID("_SMAA_Flop");

        // Ambient occlusion (note: the ids without a leading underscore target
        // compute-shader temporaries)
        internal static readonly int AOParams = Shader.PropertyToID("_AOParams");
        internal static readonly int AOColor = Shader.PropertyToID("_AOColor");
        internal static readonly int OcclusionTexture1 = Shader.PropertyToID("_OcclusionTexture1");
        internal static readonly int OcclusionTexture2 = Shader.PropertyToID("_OcclusionTexture2");
        internal static readonly int SAOcclusionTexture = Shader.PropertyToID("_SAOcclusionTexture");
        internal static readonly int MSVOcclusionTexture = Shader.PropertyToID("_MSVOcclusionTexture");
        internal static readonly int DepthCopy = Shader.PropertyToID("DepthCopy");
        internal static readonly int LinearDepth = Shader.PropertyToID("LinearDepth");
        internal static readonly int LowDepth1 = Shader.PropertyToID("LowDepth1");
        internal static readonly int LowDepth2 = Shader.PropertyToID("LowDepth2");
        internal static readonly int LowDepth3 = Shader.PropertyToID("LowDepth3");
        internal static readonly int LowDepth4 = Shader.PropertyToID("LowDepth4");
        internal static readonly int TiledDepth1 = Shader.PropertyToID("TiledDepth1");
        internal static readonly int TiledDepth2 = Shader.PropertyToID("TiledDepth2");
        internal static readonly int TiledDepth3 = Shader.PropertyToID("TiledDepth3");
        internal static readonly int TiledDepth4 = Shader.PropertyToID("TiledDepth4");
        internal static readonly int Occlusion1 = Shader.PropertyToID("Occlusion1");
        internal static readonly int Occlusion2 = Shader.PropertyToID("Occlusion2");
        internal static readonly int Occlusion3 = Shader.PropertyToID("Occlusion3");
        internal static readonly int Occlusion4 = Shader.PropertyToID("Occlusion4");
        internal static readonly int Combined1 = Shader.PropertyToID("Combined1");
        internal static readonly int Combined2 = Shader.PropertyToID("Combined2");
        internal static readonly int Combined3 = Shader.PropertyToID("Combined3");

        // Screen-space reflections
        internal static readonly int SSRResolveTemp = Shader.PropertyToID("_SSRResolveTemp");
        internal static readonly int Noise = Shader.PropertyToID("_Noise");
        internal static readonly int Test = Shader.PropertyToID("_Test");
        internal static readonly int Resolve = Shader.PropertyToID("_Resolve");
        internal static readonly int History = Shader.PropertyToID("_History");
        internal static readonly int ViewMatrix = Shader.PropertyToID("_ViewMatrix");
        internal static readonly int InverseViewMatrix = Shader.PropertyToID("_InverseViewMatrix");
        internal static readonly int InverseProjectionMatrix = Shader.PropertyToID("_InverseProjectionMatrix");
        internal static readonly int ScreenSpaceProjectionMatrix = Shader.PropertyToID("_ScreenSpaceProjectionMatrix");
        internal static readonly int Params2 = Shader.PropertyToID("_Params2");

        // Fog
        internal static readonly int FogColor = Shader.PropertyToID("_FogColor");
        internal static readonly int FogParams = Shader.PropertyToID("_FogParams");

        // Motion blur
        internal static readonly int VelocityScale = Shader.PropertyToID("_VelocityScale");
        internal static readonly int MaxBlurRadius = Shader.PropertyToID("_MaxBlurRadius");
        internal static readonly int RcpMaxBlurRadius = Shader.PropertyToID("_RcpMaxBlurRadius");
        internal static readonly int VelocityTex = Shader.PropertyToID("_VelocityTex");
        internal static readonly int Tile2RT = Shader.PropertyToID("_Tile2RT");
        internal static readonly int Tile4RT = Shader.PropertyToID("_Tile4RT");
        internal static readonly int Tile8RT = Shader.PropertyToID("_Tile8RT");
        internal static readonly int TileMaxOffs = Shader.PropertyToID("_TileMaxOffs");
        internal static readonly int TileMaxLoop = Shader.PropertyToID("_TileMaxLoop");
        internal static readonly int TileVRT = Shader.PropertyToID("_TileVRT");
        internal static readonly int NeighborMaxTex = Shader.PropertyToID("_NeighborMaxTex");
        internal static readonly int LoopCount = Shader.PropertyToID("_LoopCount");

        // Depth of field
        internal static readonly int DepthOfFieldTemp = Shader.PropertyToID("_DepthOfFieldTemp");
        internal static readonly int DepthOfFieldTex = Shader.PropertyToID("_DepthOfFieldTex");
        internal static readonly int Distance = Shader.PropertyToID("_Distance");
        internal static readonly int LensCoeff = Shader.PropertyToID("_LensCoeff");
        internal static readonly int MaxCoC = Shader.PropertyToID("_MaxCoC");
        internal static readonly int RcpMaxCoC = Shader.PropertyToID("_RcpMaxCoC");
        internal static readonly int RcpAspect = Shader.PropertyToID("_RcpAspect");
        internal static readonly int CoCTex = Shader.PropertyToID("_CoCTex");
        internal static readonly int TaaParams = Shader.PropertyToID("_TaaParams");

        // Auto exposure / histogram
        internal static readonly int AutoExposureTex = Shader.PropertyToID("_AutoExposureTex");
        internal static readonly int HistogramBuffer = Shader.PropertyToID("_HistogramBuffer");
        internal static readonly int Params = Shader.PropertyToID("_Params");
        internal static readonly int Speed = Shader.PropertyToID("_Speed");
        internal static readonly int ScaleOffsetRes = Shader.PropertyToID("_ScaleOffsetRes");
        internal static readonly int ExposureCompensation = Shader.PropertyToID("_ExposureCompensation");

        // Bloom
        internal static readonly int BloomTex = Shader.PropertyToID("_BloomTex");
        internal static readonly int SampleScale = Shader.PropertyToID("_SampleScale");
        internal static readonly int Threshold = Shader.PropertyToID("_Threshold");
        internal static readonly int ColorIntensity = Shader.PropertyToID("_ColorIntensity");
        internal static readonly int Bloom_DirtTex = Shader.PropertyToID("_Bloom_DirtTex");
        internal static readonly int Bloom_Settings = Shader.PropertyToID("_Bloom_Settings");
        internal static readonly int Bloom_Color = Shader.PropertyToID("_Bloom_Color");
        internal static readonly int Bloom_DirtTileOffset = Shader.PropertyToID("_Bloom_DirtTileOffset");

        // Chromatic aberration
        internal static readonly int ChromaticAberration_Amount = Shader.PropertyToID("_ChromaticAberration_Amount");
        internal static readonly int ChromaticAberration_SpectralLut = Shader.PropertyToID("_ChromaticAberration_SpectralLut");

        // Color grading
        internal static readonly int Lut2D = Shader.PropertyToID("_Lut2D");
        internal static readonly int Lut3D = Shader.PropertyToID("_Lut3D");
        internal static readonly int Lut3D_Params = Shader.PropertyToID("_Lut3D_Params");
        internal static readonly int Lut2D_Params = Shader.PropertyToID("_Lut2D_Params");
        internal static readonly int PostExposure = Shader.PropertyToID("_PostExposure");
        internal static readonly int ColorBalance = Shader.PropertyToID("_ColorBalance");
        internal static readonly int ColorFilter = Shader.PropertyToID("_ColorFilter");
        internal static readonly int HueSatCon = Shader.PropertyToID("_HueSatCon");
        internal static readonly int Brightness = Shader.PropertyToID("_Brightness");
        internal static readonly int ChannelMixerRed = Shader.PropertyToID("_ChannelMixerRed");
        internal static readonly int ChannelMixerGreen = Shader.PropertyToID("_ChannelMixerGreen");
        internal static readonly int ChannelMixerBlue = Shader.PropertyToID("_ChannelMixerBlue");
        internal static readonly int Lift = Shader.PropertyToID("_Lift");
        internal static readonly int InvGamma = Shader.PropertyToID("_InvGamma");
        internal static readonly int Gain = Shader.PropertyToID("_Gain");
        internal static readonly int Curves = Shader.PropertyToID("_Curves");
        internal static readonly int CustomToneCurve = Shader.PropertyToID("_CustomToneCurve");
        internal static readonly int ToeSegmentA = Shader.PropertyToID("_ToeSegmentA");
        internal static readonly int ToeSegmentB = Shader.PropertyToID("_ToeSegmentB");
        internal static readonly int MidSegmentA = Shader.PropertyToID("_MidSegmentA");
        internal static readonly int MidSegmentB = Shader.PropertyToID("_MidSegmentB");
        internal static readonly int ShoSegmentA = Shader.PropertyToID("_ShoSegmentA");
        internal static readonly int ShoSegmentB = Shader.PropertyToID("_ShoSegmentB");

        // Vignette
        internal static readonly int Vignette_Color = Shader.PropertyToID("_Vignette_Color");
        internal static readonly int Vignette_Center = Shader.PropertyToID("_Vignette_Center");
        internal static readonly int Vignette_Settings = Shader.PropertyToID("_Vignette_Settings");
        internal static readonly int Vignette_Mask = Shader.PropertyToID("_Vignette_Mask");
        internal static readonly int Vignette_Opacity = Shader.PropertyToID("_Vignette_Opacity");
        internal static readonly int Vignette_Mode = Shader.PropertyToID("_Vignette_Mode");

        // Grain
        internal static readonly int Grain_Params1 = Shader.PropertyToID("_Grain_Params1");
        internal static readonly int Grain_Params2 = Shader.PropertyToID("_Grain_Params2");
        internal static readonly int GrainTex = Shader.PropertyToID("_GrainTex");
        internal static readonly int Phase = Shader.PropertyToID("_Phase");

        // Final pass / dithering
        internal static readonly int LumaInAlpha = Shader.PropertyToID("_LumaInAlpha");
        internal static readonly int DitheringTex = Shader.PropertyToID("_DitheringTex");
        internal static readonly int Dithering_Coords = Shader.PropertyToID("_Dithering_Coords");

        // Texture lerping
        internal static readonly int From = Shader.PropertyToID("_From");
        internal static readonly int To = Shader.PropertyToID("_To");
        internal static readonly int Interp = Shader.PropertyToID("_Interp");

        // Debug / monitors
        internal static readonly int HalfResFinalCopy = Shader.PropertyToID("_HalfResFinalCopy");
        internal static readonly int WaveformSource = Shader.PropertyToID("_WaveformSource");
        internal static readonly int WaveformBuffer = Shader.PropertyToID("_WaveformBuffer");
        internal static readonly int VectorscopeBuffer = Shader.PropertyToID("_VectorscopeBuffer");

        // VR
        internal static readonly int RenderViewportScaleFactor = Shader.PropertyToID("_RenderViewportScaleFactor");
    }
}
151
orrb
openai
C#
using System; using UnityEngine.Assertions; namespace UnityEngine.Rendering.PostProcessing { // Small wrapper on top of AnimationCurve to handle zero-key curves and keyframe looping [Serializable] public sealed class Spline { public const int k_Precision = 128; public const float k_Step = 1f / k_Precision; public AnimationCurve curve; [SerializeField] bool m_Loop; [SerializeField] float m_ZeroValue; [SerializeField] float m_Range; AnimationCurve m_InternalLoopingCurve; // Used to track frame changes for data caching int frameCount = -1; // Instead of trying to be smart and blend two curves by generating a new one, we'll simply // store the curve data in a float array and blend these instead. internal float[] cachedData; public Spline(AnimationCurve curve, float zeroValue, bool loop, Vector2 bounds) { Assert.IsNotNull(curve); this.curve = curve; m_ZeroValue = zeroValue; m_Loop = loop; m_Range = bounds.magnitude; cachedData = new float[k_Precision]; } public void Cache(int frame) { // Only cache once per frame if (frame == frameCount) return; var length = curve.length; if (m_Loop && length > 1) { if (m_InternalLoopingCurve == null) m_InternalLoopingCurve = new AnimationCurve(); var prev = curve[length - 1]; prev.time -= m_Range; var next = curve[0]; next.time += m_Range; m_InternalLoopingCurve.keys = curve.keys; m_InternalLoopingCurve.AddKey(prev); m_InternalLoopingCurve.AddKey(next); } for (int i = 0; i < k_Precision; i++) cachedData[i] = Evaluate((float)i * k_Step); frameCount = Time.renderedFrameCount; } public float Evaluate(float t) { if (curve.length == 0) return m_ZeroValue; if (!m_Loop || curve.length == 1) return curve.Evaluate(t); return m_InternalLoopingCurve.Evaluate(t); } public override int GetHashCode() { unchecked { int hash = 17; hash = hash * 23 + curve.GetHashCode(); // Not implemented in Unity, so it'll always return the same value :( return hash; } } } }
93
orrb
openai
C#
using System.Collections.Generic; namespace UnityEngine.Rendering.PostProcessing { class TargetPool { readonly List<int> m_Pool; int m_Current; internal TargetPool() { m_Pool = new List<int>(); Get(); // Pre-warm with a default target to avoid black frame on first frame } internal int Get() { int ret = Get(m_Current); m_Current++; return ret; } int Get(int i) { int ret; if (m_Pool.Count > i) { ret = m_Pool[i]; } else { // Avoid discontinuities while (m_Pool.Count <= i) m_Pool.Add(Shader.PropertyToID("_TargetPool" + i)); ret = m_Pool[i]; } return ret; } internal void Reset() { m_Current = 0; } } }
49
orrb
openai
C#
using System;
using System.Collections.Generic;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    // Temporary code dump until the texture format refactor goes into trunk...
    //
    // Maps (possibly compressed) TextureFormat values to the closest uncompressed
    // RenderTextureFormat, so any source texture can be blitted into an
    // equivalent render target.
    public static class TextureFormatUtilities
    {
        // TextureFormat -> uncompressed RenderTextureFormat lookup, built once
        // in the static constructor.
        static Dictionary<TextureFormat, RenderTextureFormat> s_FormatMap;

        static TextureFormatUtilities()
        {
            s_FormatMap = new Dictionary<TextureFormat, RenderTextureFormat>
            {
                // Uncompressed & simple formats
                { TextureFormat.Alpha8, RenderTextureFormat.ARGB32 },
                { TextureFormat.ARGB4444, RenderTextureFormat.ARGB4444 },
                { TextureFormat.RGB24, RenderTextureFormat.ARGB32 },
                { TextureFormat.RGBA32, RenderTextureFormat.ARGB32 },
                { TextureFormat.ARGB32, RenderTextureFormat.ARGB32 },
                { TextureFormat.RGB565, RenderTextureFormat.RGB565 },
                { TextureFormat.R16, RenderTextureFormat.RHalf },
                { TextureFormat.DXT1, RenderTextureFormat.ARGB32 },
                { TextureFormat.DXT5, RenderTextureFormat.ARGB32 },
                { TextureFormat.RGBA4444, RenderTextureFormat.ARGB4444 },
                { TextureFormat.BGRA32, RenderTextureFormat.ARGB32 },
                // HDR / floating point formats
                { TextureFormat.RHalf, RenderTextureFormat.RHalf },
                { TextureFormat.RGHalf, RenderTextureFormat.RGHalf },
                { TextureFormat.RGBAHalf, RenderTextureFormat.ARGBHalf },
                { TextureFormat.RFloat, RenderTextureFormat.RFloat },
                { TextureFormat.RGFloat, RenderTextureFormat.RGFloat },
                { TextureFormat.RGBAFloat, RenderTextureFormat.ARGBFloat },
                { TextureFormat.RGB9e5Float, RenderTextureFormat.ARGBHalf },
                // Block-compressed desktop formats
                { TextureFormat.BC4, RenderTextureFormat.R8 },
                { TextureFormat.BC5, RenderTextureFormat.RGHalf },
                { TextureFormat.BC6H, RenderTextureFormat.ARGBHalf },
                { TextureFormat.BC7, RenderTextureFormat.ARGB32 },
#if !UNITY_IOS && !UNITY_TVOS
                // Crunched variants aren't available on iOS/tvOS
                { TextureFormat.DXT1Crunched, RenderTextureFormat.ARGB32 },
                { TextureFormat.DXT5Crunched, RenderTextureFormat.ARGB32 },
#endif
                // Mobile compressed formats
                { TextureFormat.PVRTC_RGB2, RenderTextureFormat.ARGB32 },
                { TextureFormat.PVRTC_RGBA2, RenderTextureFormat.ARGB32 },
                { TextureFormat.PVRTC_RGB4, RenderTextureFormat.ARGB32 },
                { TextureFormat.PVRTC_RGBA4, RenderTextureFormat.ARGB32 },
#if !UNITY_2018_1_OR_NEWER
                // ATC formats were removed in Unity 2018.1
                { TextureFormat.ATC_RGB4, RenderTextureFormat.ARGB32 },
                { TextureFormat.ATC_RGBA8, RenderTextureFormat.ARGB32 },
#endif
                { TextureFormat.ETC_RGB4, RenderTextureFormat.ARGB32 },
                { TextureFormat.ETC2_RGB, RenderTextureFormat.ARGB32 },
                { TextureFormat.ETC2_RGBA1, RenderTextureFormat.ARGB32 },
                { TextureFormat.ETC2_RGBA8, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGB_4x4, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGB_5x5, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGB_6x6, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGB_8x8, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGB_10x10, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGB_12x12, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGBA_4x4, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGBA_5x5, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGBA_6x6, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGBA_8x8, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGBA_10x10, RenderTextureFormat.ARGB32 },
                { TextureFormat.ASTC_RGBA_12x12, RenderTextureFormat.ARGB32 },
                { TextureFormat.ETC_RGB4_3DS, RenderTextureFormat.ARGB32 },
                { TextureFormat.ETC_RGBA8_3DS, RenderTextureFormat.ARGB32 }
            };
        }

        // Returns an uncompressed RenderTextureFormat matching the given texture:
        // a RenderTexture's own format, the mapped format for a Texture2D, or
        // RenderTextureFormat.Default for any other texture type.
        // Throws NotSupportedException for unmapped Texture2D formats.
        public static RenderTextureFormat GetUncompressedRenderTextureFormat(Texture texture)
        {
            Assert.IsNotNull(texture);

            if (texture is RenderTexture)
                return (texture as RenderTexture).format;

            if (texture is Texture2D)
            {
                var inFormat = ((Texture2D)texture).format;
                RenderTextureFormat outFormat;

                if (!s_FormatMap.TryGetValue(inFormat, out outFormat))
                    throw new NotSupportedException("Texture format not supported");

                return outFormat;
            }

            return RenderTextureFormat.Default;
        }
    }
}
93
orrb
openai
C#
using System.Collections.Generic;
using UnityEngine.Assertions;

namespace UnityEngine.Rendering.PostProcessing
{
    // Blends between two textures on the GPU (used e.g. when interpolating LUTs
    // and masks during volume blending), recycling temporary render targets
    // across frames.
    class TextureLerper
    {
        static TextureLerper m_Instance;
        internal static TextureLerper instance
        {
            get
            {
                if (m_Instance == null)
                    m_Instance = new TextureLerper();

                return m_Instance;
            }
        }

        CommandBuffer m_Command;
        PropertySheetFactory m_PropertySheets;
        PostProcessResources m_Resources;

        List<RenderTexture> m_Recycled;  // Targets free for reuse
        List<RenderTexture> m_Actives;   // Targets handed out this frame

        TextureLerper()
        {
            m_Recycled = new List<RenderTexture>();
            m_Actives = new List<RenderTexture>();
        }

        // Grabs the per-frame rendering state; call once at the start of a frame.
        internal void BeginFrame(PostProcessRenderContext context)
        {
            m_Command = context.command;
            m_PropertySheets = context.propertySheets;
            m_Resources = context.resources;
        }

        internal void EndFrame()
        {
            // Release any remaining RT in the recycled list
            if (m_Recycled.Count > 0)
            {
                foreach (var rt in m_Recycled)
                    RuntimeUtilities.Destroy(rt);

                m_Recycled.Clear();
            }

            // There's a high probability that RTs will be requested in the same order on next
            // frame so keep them in the same order
            if (m_Actives.Count > 0)
            {
                foreach (var rt in m_Actives)
                    m_Recycled.Add(rt);

                m_Actives.Clear();
            }
        }

        // Returns a render target with the given characteristics, reusing a
        // recycled one when a match exists.
        RenderTexture Get(RenderTextureFormat format, int w, int h, int d = 1, bool enableRandomWrite = false)
        {
            RenderTexture rt = null;
            int i, len = m_Recycled.Count;

            for (i = 0; i < len; i++)
            {
                var r = m_Recycled[i];
                if (r.width == w && r.height == h && r.volumeDepth == d
                    && r.format == format && r.enableRandomWrite == enableRandomWrite)
                {
                    rt = r;
                    break;
                }
            }

            if (rt == null)
            {
                var dimension = d > 1
                    ? TextureDimension.Tex3D
                    : TextureDimension.Tex2D;

                // Fix: the third constructor argument is the depth *buffer* bit
                // count, not the volume depth — it must be 0 here (these blit /
                // compute targets never need a depth buffer, and 3D textures
                // can't carry one). The volume depth is set via the initializer.
                rt = new RenderTexture(w, h, 0, format)
                {
                    filterMode = FilterMode.Bilinear,
                    wrapMode = TextureWrapMode.Clamp,
                    anisoLevel = 0,
                    volumeDepth = d,
                    enableRandomWrite = enableRandomWrite,
                    dimension = dimension
                };
                rt.Create();
            }
            else m_Recycled.RemoveAt(i);

            m_Actives.Add(rt);
            return rt;
        }

        // Blends 'from' and 'to' by factor t into a pooled render target and
        // returns it. Picks a compute path for 3D textures, a blit for 2D.
        internal Texture Lerp(Texture from, Texture to, float t)
        {
            Assert.IsNotNull(from);
            Assert.IsNotNull(to);

            bool is3d = to is Texture3D
                || (to is RenderTexture && ((RenderTexture)to).volumeDepth > 1);

            RenderTexture rt = null;

            if (is3d)
            {
                // NOTE(review): assumes 'to' is cubic — the output volume size is
                // derived from to.width alone; confirm against callers.
                int size = to.width;
                rt = Get(RenderTextureFormat.ARGBHalf, size, size, size, true);

                var compute = m_Resources.computeShaders.texture3dLerp;
                int kernel = compute.FindKernel("KTexture3DLerp");
                m_Command.SetComputeVectorParam(compute, "_Params", new Vector4(t, size, 0f, 0f));
                m_Command.SetComputeTextureParam(compute, kernel, "_Output", rt);
                m_Command.SetComputeTextureParam(compute, kernel, "_From", from);
                m_Command.SetComputeTextureParam(compute, kernel, "_To", to);

                // Smaller Z group size on Android/OpenGL, larger elsewhere.
                int groupSizeXY = Mathf.CeilToInt(size / 8f);
                int groupSizeZ = Mathf.CeilToInt(size / (RuntimeUtilities.isAndroidOpenGL ? 2f : 8f));
                m_Command.DispatchCompute(compute, kernel, groupSizeXY, groupSizeXY, groupSizeZ);
            }
            else
            {
                var format = TextureFormatUtilities.GetUncompressedRenderTextureFormat(to);
                rt = Get(format, to.width, to.height);

                var sheet = m_PropertySheets.Get(m_Resources.shaders.texture2dLerp);
                sheet.properties.SetTexture(ShaderIDs.To, to);
                sheet.properties.SetFloat(ShaderIDs.Interp, t);
                m_Command.BlitFullscreenTriangle(from, rt, sheet, 0);
            }

            return rt;
        }

        // Destroys every pooled render target (active and recycled).
        internal void Clear()
        {
            foreach (var rt in m_Actives)
                RuntimeUtilities.Destroy(rt);

            foreach (var rt in m_Recycled)
                RuntimeUtilities.Destroy(rt);

            m_Actives.Clear();
            m_Recycled.Clear();
        }
    }
}
154
orrb
openai
C#
// Small shim for VRSettings/XRSettings on XboxOne and Switch #if !UNITY_2018_2_OR_NEWER && (UNITY_XBOXONE || UNITY_SWITCH) && !UNITY_EDITOR using System; namespace UnityEngine.XR { #if UNITY_2017_2_OR_NEWER public static class XRSettings #elif UNITY_5_6_OR_NEWER public static class VRSettings #endif { public static bool enabled { get; set; } public static bool isDeviceActive { get; private set; } public static bool showDeviceView { get; set; } [Obsolete("renderScale is deprecated, use XRSettings.eyeTextureResolutionScale instead (UnityUpgradable) -> eyeTextureResolutionScale")] public static float renderScale { get; set; } public static float eyeTextureResolutionScale { get; set; } public static int eyeTextureWidth { get; private set; } public static int eyeTextureHeight { get; private set; } public static RenderTextureDescriptor eyeTextureDesc { get; private set; } public static float renderViewportScale { get; set; } public static float occlusionMaskScale { get; set; } public static bool useOcclusionMesh { get; set; } public static string loadedDeviceName { get; private set; } public static string[] supportedDevices { get; private set; } public static void LoadDeviceByName(string deviceName) { } public static void LoadDeviceByName(string[] prioritizedDeviceNameList) { } } } #endif
32
orrb
openai
C#
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using UnityEngine;

// The ComponentManager instantiates, updates and runs the
// RendererComponents. It is attached to the scene instance. It
// can be configured with RendererComponentConfig protos. The
// RendererComponent is responsible for rendering the components
// GUI in the interactive mode, and can produce a dump of the config.
public class ComponentManager : MonoBehaviour
{

    // A IComponentBuilder knows how to build a RendererComponent
    // and attach it to a GameObject.
    private interface IComponentBuilder
    {
        RendererComponent Build(GameObject game_object);
    }

    // Template RendererComponent factory.
    private class ComponentBuilder<T> : IComponentBuilder where T : RendererComponent
    {
        public RendererComponent Build(GameObject game_object)
        {
            return game_object.AddComponent<T>();
        }
    }

    private static Dictionary<string, IComponentBuilder> builders_ = DefaultBuilders();

    // Here the supported RendererComponents are registered with their names:
    // every concrete subclass of RendererComponent in this assembly gets a
    // builder keyed by the subclass type name.
    private static Dictionary<string, IComponentBuilder> DefaultBuilders()
    {
        Assembly assembly = Assembly.GetExecutingAssembly();
        IEnumerable<Type> renderer_component_types =
            assembly.GetTypes().Where(typeof(RendererComponent).IsAssignableFrom)
                               .Where(t => typeof(RendererComponent) != t);
        Dictionary<string, IComponentBuilder> builders = new Dictionary<string, IComponentBuilder>();
        Type builder_type = typeof(ComponentBuilder<>);
        foreach (Type renderer_component_type in renderer_component_types)
        {
            builders.Add(
                renderer_component_type.Name,
                Activator.CreateInstance(
                    builder_type.MakeGenericType(renderer_component_type)) as IComponentBuilder);
        }
        return builders;
    }

    // A live component.
    [System.Serializable]
    public class ComponentInstance
    {
        [SerializeField]
        public RendererComponent renderer_component;  // The attached behaviour instance.

        [SerializeField]
        public string type;  // Builder/type name this component was created from.

        [SerializeField]
        public string name;  // Unique component name; key in the manager dictionary.

        [SerializeField]
        public string path;  // Kinematic-hierarchy path of the host GameObject.

        [SerializeField]
        public bool enabled;  // Disabled components are skipped by RunComponents.

        [SerializeField]
        public bool hidden;  // Editor GUI fold-out state; starts collapsed.

        public ComponentInstance(RendererComponent renderer_component, string type, string name,
                                 string path, bool enabled)
        {
            this.renderer_component = renderer_component;
            this.type = type;
            this.name = name;
            this.path = path;
            this.enabled = enabled;
            this.hidden = true;
        }
    }

    [SerializeField]
    public List<ComponentInstance> components_ = new List<ComponentInstance>();

    // Name -> instance map for fast lookup; kept in sync with components_.
    private Dictionary<string, ComponentInstance> components_dictionary_ =
        new Dictionary<string, ComponentInstance>();

    private static char[] path_separator_ = new char[] { '/' };

    private Vector2 scroll_position_ = Vector2.zero;

    // Traverse the kinetic hierarchy in order to find the object.
    // Returns parent itself for a null/empty path; null when a segment is missing.
    private static GameObject FindSubject(GameObject parent, string path)
    {
        if (path == null || path.Length == 0)
        {
            return parent;
        }
        string[] split = path.Split(path_separator_, 2, System.StringSplitOptions.RemoveEmptyEntries);
        // Fix: a path made only of separators ("/", "//", ...) yields no segments;
        // treat it like an empty path instead of indexing split[0] and throwing.
        if (split.Length == 0)
        {
            return parent;
        }
        Transform child = parent.transform.Find(split[0]);
        if (child == null)
        {
            Logger.Error("ComponentManager::FindSubject::Cannot find: {0}", split[0]);
            return null;
        }
        if (split.Length == 1)
        {
            return child.gameObject;
        }
        // split.Length == 2: keep descending with the remainder of the path.
        return FindSubject(child.gameObject, split[1]);
    }

    // Instantiate a component of a given type and name in an object
    // that is located based on the component_path.
    private ComponentInstance InstantiateComponentByType(string type, string component_name,
                                                         string component_path,
                                                         bool component_enabled)
    {
        GameObject subject = FindSubject(this.gameObject, component_path);
        if (subject == null)
        {
            // Fix: previously a bad path fell through to building on a null
            // GameObject and crashed; fail cleanly with a log instead.
            Logger.Error("ComponentManager::InstantiateComponentByType::Cannot find path: {0} for: {1}.",
                         component_path, component_name);
            return null;
        }
        IComponentBuilder builder;
        if (!builders_.TryGetValue(type, out builder))
        {
            Logger.Error("ComponentManager::InstantiateComponentByType::Unknown component type: {0}.", type);
            return null;
        }
        RendererComponent renderer_component = builder.Build(subject);
        if (renderer_component == null)
        {
            Logger.Error("ComponentManager::InstantiateComponentByType::Builder returned null for: {0} type: {1}.",
                         component_name, type);
            return null;
        }
        return new ComponentInstance(renderer_component, type, component_name, component_path,
                                     component_enabled);
    }

    // Instantiate a component of a given type, name and config in the scene.
    public bool AddComponent(string type, string name, string path,
                             Orrb.RendererComponentConfig config, bool enabled)
    {
        if (components_dictionary_.ContainsKey(name))
        {
            Logger.Error("ComponentManager::AddComponent::Component already exists: {0}.", name);
            return false;
        }
        ComponentInstance component_instance = InstantiateComponentByType(type, name, path, enabled);
        if (component_instance == null)
        {
            Logger.Error("ComponentManager::AddComponent::Failed to instantiate component: {0} ({1}).",
                         name, type);
            return false;
        }
        if (!component_instance.renderer_component.InitializeComponent(config))
        {
            // NOTE(review): the freshly added behaviour is left on the GameObject
            // here — confirm whether it should be destroyed on init failure.
            Logger.Error("ComponentManager::AddComponent::Failed to initialize: {0} of type: {1}.",
                         name, type);
            return false;
        }
        components_.Add(component_instance);
        components_dictionary_.Add(name, component_instance);
        return true;
    }

    // Not implemented yet; always reports failure.
    public bool RemoveComponent(string name)
    {
        return false;
    }

    // Push a new config to a named component.
    public bool UpdateComponent(string name, Orrb.RendererComponentConfig config)
    {
        ComponentInstance component_instance;
        if (components_dictionary_.TryGetValue(name, out component_instance))
        {
            return component_instance.renderer_component.UpdateComponent(config);
        }
        Logger.Warning("ComponentManager::UpdateComponent::Cannot find component: {0}.", name);
        return false;
    }

    // Run all enabled components against the given output context. Individual
    // failures are logged but do not stop the remaining components.
    public bool RunComponents(RendererComponent.IOutputContext context)
    {
        foreach (ComponentInstance component_instance in components_)
        {
            if (component_instance.enabled)
            {
                if (!component_instance.renderer_component.RunComponent(context))
                {
                    Logger.Warning("ComponentManager::RunComponents::Failed running: {0}.",
                                   component_instance.name);
                }
            }
        }
        return true;
    }

    // Interactive-mode GUI: a scrollable list with a fold-out editor, a "Run"
    // button and an enable toggle per component.
    public void DrawEditorGUI()
    {
        scroll_position_ = GUILayout.BeginScrollView(scroll_position_, GUILayout.ExpandWidth(true));
        GUILayout.BeginVertical();
        foreach (ComponentInstance component_instance in components_)
        {
            GUILayout.BeginHorizontal();
            if (GUILayout.Button(component_instance.hidden ? "+" : "-", GUILayout.Width(20)))
            {
                component_instance.hidden = !component_instance.hidden;
            }
            if (GUILayout.Button("Run", GUILayout.Width(40)))
            {
                component_instance.renderer_component.RunComponent(
                    new RendererComponent.NullOutputContext());
            }
            component_instance.enabled = GUILayout.Toggle(
                component_instance.enabled, string.Format(" {0}", component_instance.name),
                GUILayout.ExpandWidth(true));
            GUILayout.EndHorizontal();
            if (!component_instance.hidden)
            {
                GUILayout.BeginHorizontal();
                component_instance.renderer_component.DrawEditorGUI();
                GUILayout.FlexibleSpace();
                GUILayout.EndHorizontal();
            }
            GUILayout.Space(3);
        }
        GUILayout.EndVertical();
        GUILayout.EndScrollView();
    }

    // Let every enabled component draw its in-scene handles/gizmos.
    public void DrawSceneGUI()
    {
        foreach (ComponentInstance component_instance in components_)
        {
            if (component_instance.enabled)
            {
                component_instance.renderer_component.DrawSceneGUI();
            }
        }
    }

    // Produce a RendererConfig based on current state of all the RendererComponents.
    public Orrb.RendererConfig GetConfig()
    {
        Orrb.RendererConfig config = new Orrb.RendererConfig();
        foreach (ComponentInstance component_instance in components_)
        {
            Orrb.RendererComponent component_item = new Orrb.RendererComponent();
            component_item.Name = component_instance.name;
            component_item.Type = component_instance.type;
            component_item.Path = component_instance.path;
            component_item.Config = component_instance.renderer_component.GetConfig();
            config.Components.Add(component_item);
        }
        return config;
    }
}
239
orrb
openai
C#
using System.Collections; using System.Collections.Generic; using UnityEngine; public class RenderBatch { public class CameraBatch { public enum RenderType { RGB=0, DEPTH, NORMALS, SEGMENTATION } // mapping from render type to rendered camera images public Dictionary<RenderType, List<Texture2D>> images_ = new Dictionary<RenderType, List<Texture2D>>(); } // mapping from camera name to CameraBatch instance public Dictionary<string, CameraBatch> camera_batches_ = new Dictionary<string, CameraBatch>(); } // Interface for all the consumers that receive batches of rendered images. public interface IImageBatchConsumer { void ConsumeImageBatch(RenderBatch batch); }
27
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using UnityEngine;

// ImageDumper consumes rendered batches and saves them to disk as
// images. It uses Parallel.ForEach to speed up compute and I/O bound
// operations.
//
// Configurable flags:
//   int max_worker_threads - how many images to process in parallel,
//   int offset - start with this number when naming images,
//   string dir - save dir relative to CWD,
//   enum format - PNG, JPG or EXR,
public class ImageDumper : MonoBehaviour, IImageBatchConsumer {

    // Supported on-disk encodings.
    public enum Format {
        PNG, JPEG, EXR
    };

    [SerializeField]
    [Flag]
    public int max_worker_threads_ = 10;

    [SerializeField]
    [Flag]
    public int offset_ = 0;

    [SerializeField]
    [Flag]
    public string dir_ = "output";

    [SerializeField]
    [Flag]
    public Format format_ = Format.PNG;

    // Number of batches written so far (visible for monitoring only).
    [HideInInspector]
    public int batch_count_ = 0;

    // Per-stream counters so consecutive batches get consecutive file names.
    private Dictionary<string, int> offsets_ = new Dictionary<string, int>();

    // Use this for initialization.
    void Start() {
        Flags.InitFlags(this, "image_dumper");
    }

    // Create the output directory; returns true on success (CreateDirectory
    // is a no-op if the directory already exists).
    public bool Initialize() {
        Directory.CreateDirectory(dir_);
        return true;
    }

    // A single work item: one texture plus its destination path.
    private class ImageToSave {
        public Texture2D image_ = null;
        public string path_ = "";

        public ImageToSave(Texture2D image, string path) {
            image_ = image;
            path_ = path;
        }
    }

    // Encode and write every RGB image of the batch, in parallel.
    // NOTE(review): Encode() calls Unity texture-encoding APIs from worker
    // threads here — confirm this is safe on the targeted Unity version.
    public void ConsumeImageBatch(RenderBatch batch) {
        List<ImageToSave> flat_batch = PrepareBatch(batch);
        Parallel.ForEach(
            flat_batch, GetParallelOptions(), image_to_save => {
                byte[] bytes = Encode(image_to_save.image_);
                // File.WriteAllBytes is equivalent to the previous manual
                // FileMode.Create FileStream write, with less ceremony.
                File.WriteAllBytes(image_to_save.path_, bytes);
            });
        batch_count_++;
    }

    // Encode one texture according to the configured format. An unknown
    // format yields an empty payload (an empty file will be written).
    private byte[] Encode(Texture2D image) {
        switch (format_) {
            case Format.PNG:
                return image.EncodeToPNG();
            case Format.JPEG:
                return image.EncodeToJPG(95);  // quality 95
            case Format.EXR:
                return image.EncodeToEXR();
            default:
                return new byte[0];
        }
    }

    // Limit parallelism to the configured worker count.
    private ParallelOptions GetParallelOptions() {
        ParallelOptions options = new ParallelOptions();
        options.MaxDegreeOfParallelism = max_worker_threads_;
        return options;
    }

    // Prepare a flat list of work items (RGB images only), assigning each
    // image a running index within its camera stream.
    private List<ImageToSave> PrepareBatch(RenderBatch batch) {
        List<ImageToSave> flat_batch = new List<ImageToSave>();
        foreach (KeyValuePair<string, RenderBatch.CameraBatch> pair in batch.camera_batches_) {
            int offset = GetOffset(pair.Key);
            List<Texture2D> rgb_data = pair.Value.images_[RenderBatch.CameraBatch.RenderType.RGB];
            for (int i = 0; i < rgb_data.Count; ++i) {
                flat_batch.Add(new ImageToSave(rgb_data[i], GetSavePath(pair.Key, i + offset)));
            }
            // Move the offsets, naming for the next batch starts where the
            // previous ended.
            UpdateOffset(pair.Key, offset + rgb_data.Count);
        }
        return flat_batch;
    }

    // Current naming offset for a stream; falls back to the configured
    // global offset for streams seen for the first time.
    private int GetOffset(string stream_name) {
        // Single dictionary lookup instead of ContainsKey + indexer.
        int offset;
        if (offsets_.TryGetValue(stream_name, out offset)) {
            return offset;
        }
        return offset_;
    }

    private void UpdateOffset(string stream_name, int offset) {
        offsets_[stream_name] = offset;
    }

    // E.g. "output/camera1_0000042.png" (7-digit zero-padded index).
    private string GetSavePath(string stream_name, int index) {
        return string.Format("{0}/{1}_{2:D7}.{3}", dir_, stream_name, index, GetFormat());
    }

    // File extension for the configured format.
    private string GetFormat() {
        switch (format_) {
            case Format.PNG:
                return "png";
            case Format.JPEG:
                return "jpeg";
            case Format.EXR:
                return "exr";
            default:
                return "error";
        }
    }
}
143
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.Threading;
using System.IO;
using UnityEngine.Rendering.PostProcessing;
using UnityEngine.Rendering;

// The Recorder works in server / offline mode. It is responsible for
// preparing offscreen buffers that are used for batch rendering, issuing
// the actual render call and finally pulling the images from the GPU.
// To avoid GPU/CPU stalls and unnecessary data dependencies, there is
// a long pipeline of images in flight. The Recorder keeps a large number
// of render targets and destination textures, that are accessed in
// a round robin fashion.
//
// Configurable flags:
//   int capture_width - default width for the captured image,
//   int capture_height - default height for the captured image,
//   bool capture_alpha - should capture transparency by default,
//   int capture_fps - deprecated,
//   float speedup - deprecated,
//   int batch_size - default batch size.
public class Recorder : MonoBehaviour {

    [SerializeField]
    [Flag]
    public int capture_width_ = 512;

    [SerializeField]
    [Flag]
    public int capture_height_ = 512;

    [SerializeField]
    [Flag]
    public bool capture_alpha_ = false;

    [SerializeField]
    [Flag]
    public int capture_fps_ = 30;

    [SerializeField]
    [Flag]
    public float speedup_ = 10.0f;

    [SerializeField]
    [Flag]
    public int batch_size_ = 200;

    // Receives the finished batches (e.g. RenderServer or ImageDumper).
    [SerializeField]
    public IImageBatchConsumer batch_consumer_ = null;

    // Last render target captured; exposed for debugging/preview.
    public RenderTexture last_render_ = null;

    // Round-robin cursors into the two render-texture pools.
    private int render_texture_rgb_index_ = -1;
    private int render_texture_depth_index_ = -1;
    // Frames captured in the current batch / since startup.
    private int capture_count_ = 0;
    private int capture_total_count_ = 0;
    // Set by Capture(), consumed by the end-of-frame coroutine.
    private bool capture_next_frame_ = false;
    private int batch_count_ = 0;
    private bool capture_depth_ = false;
    private bool capture_normals_ = false;

    // This structure holds the batch textures for a single camera that
    // is capturing.
    [System.Serializable]
    public class CameraSetup {

        public enum CameraType {
            RGB = 0,
            DEPTH_NORMALS,
            SEGMENTATION,
        }

        public Camera camera_ = null;
        public List<Texture2D> captured_images_ = new List<Texture2D>();
        public CameraType camera_type_ = CameraType.RGB;
        public string camera_name_ = null;

        private int width_ = 0;
        private int height_ = 0;
        private int batch_size_ = 0;
        private bool alpha_ = false;

        public CameraSetup(Camera camera, int batch_size, int width, int height, bool alpha,
                           CameraType camera_type) {
            width_ = width;
            height_ = height;
            batch_size_ = batch_size;
            alpha_ = alpha;
            camera_type_ = camera_type;
            camera_name_ = camera.name;
            camera_ = PrepareCamera(camera);
            ResetBatch(camera, batch_size, width, height, alpha, camera_type);
        }

        // Set the current render texture.
        public void UpdateRenderTexture(RenderTexture next_render_texture) {
            camera_.targetTexture = next_render_texture;
        }

        // Issue framebuffer to texture asynchronous blit.
        public RenderTexture CaptureRenderTexture(int index) {
            Texture2D next_capture = captured_images_[index];
            RenderTexture.active = camera_.targetTexture;
            next_capture.ReadPixels(new Rect(0, 0, next_capture.width, next_capture.height), 0, 0);
            return camera_.targetTexture;
        }

        // Commit and apply any not commited textures, this might be blocking,
        // so do it at the end, when the whole batch is ready and data stalls
        // would be least significant.
        public List<Texture2D> ApplyAndGetImages() {
            foreach (Texture2D texture in captured_images_) {
                texture.Apply();
            }
            List<Texture2D> images = new List<Texture2D>();
            for (int i = 0; i < batch_size_; ++i) {
                images.Add(captured_images_[i]);
            }
            return images;
        }

        // Make sure we have enough of big enough render textures for new
        // batch parameters.
        public void ResetBatch(Camera camera, int batch_size, int width, int height, bool alpha,
                               CameraSetup.CameraType camera_type) {
            // Any change of image geometry invalidates the cached textures.
            if (width != width_ || height != height_ || alpha != alpha_) {
                captured_images_.Clear();
                width_ = width;
                height_ = height;
                alpha_ = alpha;
            }
            // A different camera or render type needs a freshly prepared camera.
            if (camera_type_ != camera_type || camera.name != camera_name_) {
                camera_type_ = camera_type;
                camera_name_ = camera.name;
                camera_ = PrepareCamera(camera);
            }
            batch_size_ = batch_size;
            // Shrink an oversized cache (only for reasonably large batches).
            if (captured_images_.Count > 2 * batch_size && batch_size >= 32) {
                captured_images_.RemoveRange(batch_size, captured_images_.Count - batch_size);
                return;
            }
            // Grow the cache until one destination texture per batch entry exists.
            while (captured_images_.Count < batch_size) {
                TextureFormat format;
                if (camera_type_ == CameraType.DEPTH_NORMALS) {
                    // Depth in R, normals in GBA: needs a float texture.
                    format = TextureFormat.RGBAFloat;
                } else {
                    format = alpha ? TextureFormat.RGBA32 : TextureFormat.RGB24;
                }
                Texture2D capture_image = new Texture2D(width, height, format, false);
                captured_images_.Add(capture_image);
            }
        }

        public void CleanUp() {
            captured_images_.Clear();
            batch_size_ = 0;
        }

        private Camera PrepareCamera(Camera camera) {
            // Save original camera name since it is modified for segmentation
            if (camera_type_ != CameraType.RGB) {
                return CreateSegmentationCamera(camera);
            } else {
                return camera;
            }
        }

        // Clone the template camera and set it up to render labels/depth via
        // the replacement shader, with post-processing stripped off.
        private Camera CreateSegmentationCamera(Camera camera_template) {
            Camera cam = Instantiate(camera_template);
            cam.name = camera_template.name + " (Segmentation)";
            cam.transform.parent = camera_template.transform;
            cam.transform.localPosition = Vector3.zero;
            cam.transform.localRotation = Quaternion.identity;
            cam.transform.localScale = Vector3.one;
            Destroy(cam.GetComponent<PostProcessVolume>());
            Destroy(cam.GetComponent<PostProcessLayer>());
            var cb = new CommandBuffer();
            // _OutputMode selects the replacement shader's output:
            // 0 = segmentation labels, 1 = depth + normals.
            switch (camera_type_) {
                case CameraType.SEGMENTATION:
                    cb.SetGlobalFloat("_OutputMode", 0);
                    break;
                case CameraType.DEPTH_NORMALS:
                    cb.SetGlobalFloat("_OutputMode", 1);
                    break;
                default:
                    break;
            }
            cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, cb);
            cam.AddCommandBuffer(CameraEvent.BeforeFinalPass, cb);
            cam.backgroundColor = Color.black;
            cam.clearFlags = CameraClearFlags.SolidColor;
            cam.renderingPath = RenderingPath.Forward;
            cam.SetReplacementShader(Shader.Find("Hidden/LabelShader"), "");
            return cam;
        }
    };

    // Active camera setups, grouped by render type.
    public Dictionary<CameraSetup.CameraType, List<CameraSetup>> camera_setups_ =
        new Dictionary<CameraSetup.CameraType, List<CameraSetup>>();

    // RGB (and optionally Alpha, so RGBA) textures used for appearance
    public List<RenderTexture> render_textures_rgb_ = new List<RenderTexture>();
    // Depth textures used for depth and surface normals
    public List<RenderTexture> render_textures_depth_ = new List<RenderTexture>();

    // Use this for initialization
    void Start() {
        Flags.InitFlags(this, "recorder");
    }

    // One-time setup: resolution, time scaling, texture pools, and the
    // end-of-frame capture coroutine. Returns true on success.
    public bool Initialize(IImageBatchConsumer batch_consumer) {
        Screen.SetResolution(capture_width_, capture_height_, false);
        Time.captureFramerate = (int)(speedup_ * capture_fps_);
        Time.timeScale = speedup_;
        batch_consumer_ = batch_consumer;
        PrepareRenderTextures();
        ResetBatch(new List<Camera>(), batch_size_, capture_width_, capture_height_, capture_alpha_,
                   false, false, false);
        StartCoroutine(RenderCaptureHook());
        Logger.Info("Recorder::Initialize::Capture ready.");
        return true;
    }

    // Reconfigure for a new batch: which cameras and render types capture,
    // at what resolution and batch size. Reuses existing setups/textures
    // where the parameters did not change.
    public void ResetBatch(IList<Camera> capture_cameras, int batch_size, int width, int height,
                           bool alpha, bool depth, bool normals, bool segmentation) {
        // RGB always captures; depth/normals share one DEPTH_NORMALS pass.
        HashSet<Recorder.CameraSetup.CameraType> camera_types =
            new HashSet<Recorder.CameraSetup.CameraType>();
        camera_types.Add(Recorder.CameraSetup.CameraType.RGB);
        if (depth || normals) {
            camera_types.Add(Recorder.CameraSetup.CameraType.DEPTH_NORMALS);
        }
        if (segmentation) {
            camera_types.Add(Recorder.CameraSetup.CameraType.SEGMENTATION);
        }
        foreach (CameraSetup.CameraType camera_type in
                 System.Enum.GetValues(typeof(CameraSetup.CameraType))) {
            // Drop setups for types not requested this batch.
            if (!camera_types.Contains(camera_type)) {
                camera_setups_.Remove(camera_type);
                continue;
            }
            if (!camera_setups_.ContainsKey(camera_type)) {
                camera_setups_.Add(camera_type, new List<CameraSetup>());
            }
            // Create or reset one CameraSetup per capturing camera.
            for (int i = 0; i < capture_cameras.Count; ++i) {
                if (camera_setups_[camera_type].Count < i + 1) {
                    camera_setups_[camera_type].Add(new CameraSetup(capture_cameras[i], batch_size,
                                                                    width, height, alpha,
                                                                    camera_type));
                } else {
                    camera_setups_[camera_type][i].ResetBatch(capture_cameras[i], batch_size, width,
                                                              height, alpha, camera_type);
                }
            }
        }
        // Geometry change invalidates the shared render-texture pools.
        if (capture_width_ != width || capture_height_ != height || capture_alpha_ != alpha) {
            capture_width_ = width;
            capture_height_ = height;
            capture_alpha_ = alpha;
            PrepareRenderTextures();
        }
        capture_depth_ = depth;
        capture_normals_ = normals;
        batch_size_ = batch_size;
        RoundRobinRenderTextures();
        capture_count_ = 0;
        capture_next_frame_ = false;
    }

    private void PrepareRenderTextures() {
        // 30 targets per pool keeps a deep pipeline of frames in flight.
        PrepareRenderTextures(30, render_textures_rgb_, RenderTextureFormat.Default);
        PrepareRenderTextures(30, render_textures_depth_, RenderTextureFormat.ARGBFloat);
    }

    // Prepare a pool of render textures.
    private void PrepareRenderTextures(int rt_count, List<RenderTexture> texture_list,
                                       RenderTextureFormat format) {
        texture_list.Clear();
        for (int i = 0; i < rt_count; ++i) {
            RenderTexture texture = new RenderTexture(capture_width_, capture_height_, 24, format,
                                                      RenderTextureReadWrite.sRGB);
            texture.Create();
            texture_list.Add(texture);
        }
    }

    // Cycle through the cameras and the pool of render textures in a round
    // robin fashion (to avoid data stalls).
    private void RoundRobinRenderTextures() {
        foreach (List<CameraSetup> camera_setups in camera_setups_.Values) {
            foreach (CameraSetup camera_setup in camera_setups) {
                camera_setup.UpdateRenderTexture(NextRenderTexture(camera_setup.camera_type_));
            }
        }
    }

    // Next pooled texture for the given render type (depth and RGB use
    // separate pools and cursors).
    private RenderTexture NextRenderTexture(CameraSetup.CameraType camera_type) {
        if (camera_type == CameraSetup.CameraType.DEPTH_NORMALS) {
            render_texture_depth_index_ = (render_texture_depth_index_ + 1) % render_textures_depth_.Count;
            return render_textures_depth_[render_texture_depth_index_];
        } else {
            render_texture_rgb_index_ = (render_texture_rgb_index_ + 1) % render_textures_rgb_.Count;
            return render_textures_rgb_[render_texture_rgb_index_];
        }
    }

    // Issue render request to all capturing cameras.
    public void Capture() {
        // We will be capturing this frame. Flip a flag to let the coroutine know.
        capture_next_frame_ = true;
        foreach (List<CameraSetup> camera_setups in camera_setups_.Values) {
            foreach (CameraSetup camera_setup in camera_setups) {
                camera_setup.camera_.Render();
            }
        }
    }

    // This coroutine is hooked to the event that notifies when the rendering
    // (which is asynchronous) is done.
    public IEnumerator RenderCaptureHook() {
        while (true) {
            yield return new WaitForEndOfFrame();
            // Are we capturing this frame?
            if (!capture_next_frame_) {
                continue;
            }
            capture_next_frame_ = false;
            CaptureRenderTextures();
            RoundRobinRenderTextures();
            // The whole batch is done, apply, DMA and inform the consumers.
            if (capture_count_ == batch_size_) {
                ProcessBatch();
            }
        }
    }

    // Read back the current frame from every capturing camera into the
    // batch slot at capture_count_.
    private void CaptureRenderTextures() {
        foreach (List<CameraSetup> camera_setups in camera_setups_.Values) {
            foreach (CameraSetup camera_setup in camera_setups) {
                last_render_ = camera_setup.CaptureRenderTexture(capture_count_);
            }
        }
        capture_count_++;
        capture_total_count_++;
    }

    // Assemble a RenderBatch (per camera name, per render type) and hand it
    // to the consumer. A DEPTH_NORMALS pass feeds both the DEPTH and NORMALS
    // streams from the same textures when both were requested.
    private void ProcessBatch() {
        RenderBatch batch = new RenderBatch();
        foreach (List<CameraSetup> camera_setups in camera_setups_.Values) {
            foreach (CameraSetup camera_setup in camera_setups) {
                RenderBatch.CameraBatch camera_batch;
                if (batch.camera_batches_.ContainsKey(camera_setup.camera_name_)) {
                    camera_batch = batch.camera_batches_[camera_setup.camera_name_];
                } else {
                    camera_batch = new RenderBatch.CameraBatch();
                    batch.camera_batches_.Add(camera_setup.camera_name_, camera_batch);
                }
                switch (camera_setup.camera_type_) {
                    case CameraSetup.CameraType.RGB:
                        camera_batch.images_.Add(RenderBatch.CameraBatch.RenderType.RGB,
                                                 camera_setup.ApplyAndGetImages());
                        break;
                    case CameraSetup.CameraType.SEGMENTATION:
                        camera_batch.images_.Add(RenderBatch.CameraBatch.RenderType.SEGMENTATION,
                                                 camera_setup.ApplyAndGetImages());
                        break;
                    case CameraSetup.CameraType.DEPTH_NORMALS:
                        List<Texture2D> images = camera_setup.ApplyAndGetImages();
                        if (capture_depth_) {
                            camera_batch.images_.Add(RenderBatch.CameraBatch.RenderType.DEPTH, images);
                        }
                        if (capture_normals_) {
                            camera_batch.images_.Add(RenderBatch.CameraBatch.RenderType.NORMALS, images);
                        }
                        break;
                }
            }
        }
        batch_consumer_.ConsumeImageBatch(batch);
        capture_count_ = 0;
        batch_count_++;
    }
}
405
orrb
openai
C#
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// The RendererComponent is an abstract base class for all the components
// that modify, augment and randomize the scene and visual appearance
// of the rendered image.
public abstract class RendererComponent : MonoBehaviour {

    // An OutputContext can be passed when running a component, it
    // is used to record auxiliary outputs, other than the rendered images.
    public interface IOutputContext {
        void OutputInt(string output_name, int value);
        void OutputInts(string output_name, int[] values);
        void OutputFloat(string output_name, float value);
        void OutputFloats(string output_name, float[] values);
        void OutputBool(string output_name, bool value);
        void OutputBools(string output_name, bool[] values);
    };

    // This OutputContext just ignores the auxiliary outputs.
    public class NullOutputContext : RendererComponent.IOutputContext {
        public void OutputBool(string output_name, bool value) { }
        public void OutputBools(string output_name, bool[] values) { }
        public void OutputFloat(string output_name, float value) { }
        public void OutputFloats(string output_name, float[] values) { }
        public void OutputInt(string output_name, int value) { }
        public void OutputInts(string output_name, int[] values) { }
    }

    // Default initialization simply applies the provided config.
    public virtual bool InitializeComponent(Orrb.RendererComponentConfig config) {
        return UpdateComponent(config);
    }

    // By default just pull the configurable properties from the config.
    public virtual bool UpdateComponent(Orrb.RendererComponentConfig config) {
        ConfigUtils.GetProperties(this, config);
        return true;
    }

    // Perform the component's work for one frame; auxiliary outputs go
    // through the context. Returns false on failure.
    public abstract bool RunComponent(IOutputContext context);

    // Draw the component's interactive-mode configuration GUI.
    public abstract void DrawEditorGUI();

    // Optional in-scene gizmos; no-op by default.
    public virtual void DrawSceneGUI() { }

    // Snapshot the component's configurable properties into a config proto.
    public virtual Orrb.RendererComponentConfig GetConfig() {
        Orrb.RendererComponentConfig config = new Orrb.RendererComponentConfig();
        ConfigUtils.SetProperties(this, config);
        return config;
    }

    // Shorten long labels with an ellipsis for the GUI.
    private static string Truncate(string text, int length) {
        if (text.Length > length) {
            return text.Substring(0, length) + "...";
        } else {
            return text;
        }
    }

    // These helper functions are used to generate the gui in the
    // interactive mode. The empty catch blocks deliberately ignore
    // unparsable text-field input, keeping the previous value.
    public static void GUISlider(string label, ref float slider_value, float min, float max) {
        GUILayout.BeginHorizontal();
        GUILayout.Label(Truncate(label, 20), GUILayout.Width(120));
        try {
            slider_value = float.Parse(GUILayout.TextField(string.Format("{0:0.0##########}",
                                                                         slider_value),
                                                           GUILayout.Width(80)));
        } catch { }
        slider_value = GUILayout.HorizontalSlider(slider_value, min, max, GUILayout.MaxWidth(170));
        GUILayout.EndHorizontal();
    }

    public static void GUIToggle(string label, ref bool toggle_value) {
        GUILayout.BeginHorizontal();
        GUILayout.Label(Truncate(label, 20), GUILayout.Width(120));
        toggle_value = GUILayout.Toggle(toggle_value, "");
        GUILayout.EndHorizontal();
    }

    // Text field helper; returns true if the value was edited.
    public static bool GUIField(string label, ref string field_value) {
        GUILayout.BeginHorizontal();
        GUILayout.Label(Truncate(label, 20), GUILayout.Width(120));
        string old_value = field_value;
        field_value = GUILayout.TextField(field_value, GUILayout.MaxWidth(250));
        GUILayout.EndHorizontal();
        return !old_value.Equals(field_value);
    }

    // Draw a thin separator line of the given pixel height.
    public static void GUIHorizontalLine(int height) {
        GUIStyle style = new GUIStyle();
        style.normal.background = Texture2D.whiteTexture;
        style.margin = new RectOffset(4, 4, 6, 6);
        Color c = GUI.color;
        GUI.color = new Color(0.7f, 0.7f, 0.7f, 0.5f);
        GUILayout.Box(GUIContent.none, style, GUILayout.ExpandWidth(true), GUILayout.Height(height));
        GUI.color = c;
    }

    public static bool GUIButton(string label) {
        return GUILayout.Button(label);
    }

    // Three text fields editing a Vector3 in place.
    public static void GUIVector3(string label, ref Vector3 field_value) {
        float x = 0.0f;
        float y = 0.0f;
        float z = 0.0f;
        GUILayout.BeginHorizontal();
        GUILayout.Label(Truncate(label, 20), GUILayout.Width(120));
        try {
            x = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.x),
                                                GUILayout.Width(80)));
        } catch { }
        try {
            y = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.y),
                                                GUILayout.Width(80)));
        } catch { }
        try {
            z = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.z),
                                                GUILayout.Width(80)));
        } catch { }
        field_value = new Vector3(x, y, z);
        GUILayout.EndHorizontal();
    }

    // Three sliders (fixed -0.5..0.5 range) editing a Vector3 in place.
    public static void GUIVector3Sliders(string label, ref Vector3 field_value) {
        float x = 0.0f;
        float y = 0.0f;
        float z = 0.0f;
        GUILayout.BeginVertical();
        GUILayout.Label(Truncate(label, 40), GUILayout.Width(400));
        x = GUILayout.HorizontalSlider(field_value.x, -0.5f, 0.5f, GUILayout.Width(400));
        y = GUILayout.HorizontalSlider(field_value.y, -0.5f, 0.5f, GUILayout.Width(400));
        z = GUILayout.HorizontalSlider(field_value.z, -0.5f, 0.5f, GUILayout.Width(400));
        field_value = new Vector3(x, y, z);
        GUILayout.EndVertical();
    }

    // Four text fields editing a Quaternion (x, y, z, w) in place.
    public static void GUIQuaternion(string label, ref Quaternion field_value) {
        float x = 0.0f;
        float y = 0.0f;
        float z = 0.0f;
        float w = 1.0f;
        GUILayout.BeginHorizontal();
        GUILayout.Label(Truncate(label, 20), GUILayout.Width(120));
        try {
            x = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.x),
                                                GUILayout.Width(60)));
        } catch { }
        try {
            y = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.y),
                                                GUILayout.Width(60)));
        } catch { }
        try {
            z = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.z),
                                                GUILayout.Width(60)));
        } catch { }
        try {
            w = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", field_value.w),
                                                GUILayout.Width(60)));
        } catch { }
        field_value = new Quaternion(x, y, z, w);
        GUILayout.EndHorizontal();
    }

    // RGB rows get both a text field and a slider; alpha only a text field.
    public static void GUIColor(string label, ref Color field_value) {
        float r = field_value.r;
        float g = field_value.g;
        float b = field_value.b;
        float a = field_value.a;
        GUILayout.BeginHorizontal();
        GUILayout.Label(Truncate(label, 20), GUILayout.Width(120));
        try {
            r = float.Parse(GUILayout.TextField(string.Format("{0:0.0##########}", r),
                                                GUILayout.Width(80)));
        } catch { }
        r = GUILayout.HorizontalSlider(r, 0, 1, GUILayout.MaxWidth(170));
        GUILayout.EndHorizontal();
        GUILayout.BeginHorizontal();
        GUILayout.Label("", GUILayout.Width(120));
        try {
            g = float.Parse(GUILayout.TextField(string.Format("{0:0.0##########}", g),
                                                GUILayout.Width(80)));
        } catch { }
        g = GUILayout.HorizontalSlider(g, 0, 1, GUILayout.MaxWidth(170));
        GUILayout.EndHorizontal();
        GUILayout.BeginHorizontal();
        GUILayout.Label("", GUILayout.Width(120));
        try {
            b = float.Parse(GUILayout.TextField(string.Format("{0:0.0##########}", b),
                                                GUILayout.Width(80)));
        } catch { }
        b = GUILayout.HorizontalSlider(b, 0, 1, GUILayout.MaxWidth(170));
        GUILayout.EndHorizontal();
        GUILayout.BeginHorizontal();
        GUILayout.Label("", GUILayout.Width(120));
        try {
            a = float.Parse(GUILayout.TextField(string.Format("{0:0.0######}", a),
                                                GUILayout.Width(60)));
        } catch { }
        field_value = new Color(r, g, b, a);
        GUILayout.EndHorizontal();
    }
}
218
orrb
openai
C#
using System.Collections.Generic; using System.Threading.Tasks; using Grpc.Core; using UnityEngine; using StreamEntry = Orrb.RenderBatchResponse.Types.StreamEntry; using BatchResponseEntry = Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry; using Google.Protobuf; using System.Threading; using System.IO; using System; // The RenderServer starts a GRPC service and processes incoming // RenderBatch and UpdateRenderer requests. The GRPC servers are // inherently asynchronous and the Unity game loop is embarassingly // serial. To join those two worlds the GRPC server communicates with // the game loop over a concurrent queue. The incoming requests wait // on enqueued workloads. The game loop processes the workloads in // a serial fashion and then fulfills a Response promise. The queue // will spin for a little while and then use user space conditional // variables, in order to reduce context switching and provide highest // performance in high load scenarios. When idle the queue will wait // on the conditional variable with high timeout, reducing the idle // load significantly. // // Configurable flags: // int queues_count - GRPC completion queue count, // int workers_count - GRPC worker threads initial count, // int streams_count - GRPC concurrent streams max count, // int port - port to bind the render service to, // string host - host to bind the render service to. public class RenderServer : MonoBehaviour, IImageBatchConsumer { private interface IRenderServerWorkload { void InitializeWorkload(); // One frame of processing. void ProcessWorkload(); // Has this workload finished? bool WorkloadDone(); } private class QueuedWorkloadRequest<Request, Response> where Request : class { public RenderServer server_ = null; // The GRPC server hangs on this promise, signal it when the work // is done and the response can be sent back. 
public TaskCompletionSource<Response> response_promise_ = new TaskCompletionSource<Response>(); public Request request_ = null; public QueuedWorkloadRequest(RenderServer server, Request request) { server_ = server; request_ = request; } } // This structure gathers auxiliary outputs. It assumes that the per // frame output from a given stream will have constant size. It also // assumes that all frames generate the same output streams. private class BatchOutputContext : RendererComponent.IOutputContext { private int batch_size_ = 0; private int current_entry_ = 0; private class OutputStreams<T> where T : struct { private Dictionary<string, T[]> streams_ = new Dictionary<string, T[]>(); public Dictionary<string, T[]> GetStreams() { return streams_; } private bool EnsureStream(string stream_name, int stream_size, int entry, int entry_length) { if (streams_.ContainsKey(stream_name)) { T[] stream = streams_[stream_name]; if (stream.Length != stream_size * entry_length) { Logger.Error("RenderServer::BatchOutputContext::EnsureStream::Wrong size, {0} vs. 
{1}x{2}.", stream.Length, stream_size, entry_length); return false; } return true; } else if (entry == 0) { streams_.Add(stream_name, new T[stream_size * entry_length]); return true; } else { Logger.Error("RenderServer::BatchOutputContext::EnsureStream::Entry not 0, in initialize."); return false; } } public bool Output(string stream_name, int stream_size, int entry, T value) { if (!EnsureStream(stream_name, stream_size, entry, 1)) { Logger.Error("RenderServer::BatchOutputContext::Output::Cannot output to stream: {0}", stream_name); return false; } streams_[stream_name][entry] = value; return true; } public bool Output(string stream_name, int stream_size, int entry, T[] values) { if (!EnsureStream(stream_name, stream_size, entry, values.Length)) { Logger.Error("RenderServer::BatchOutputContext::Output::Cannot output to stream: {0}", stream_name); return false; } Array.Copy(values, 0, streams_[stream_name], values.Length * entry, values.Length); return true; } }; private OutputStreams<int> int_streams_ = null; private OutputStreams<float> float_streams_ = null; private OutputStreams<bool> bool_streams_ = null; public BatchOutputContext(int batch_size) { this.batch_size_ = batch_size; this.int_streams_ = new OutputStreams<int>(); this.float_streams_ = new OutputStreams<float>(); this.bool_streams_ = new OutputStreams<bool>(); } public void Advance() { this.current_entry_++; } public void OutputBool(string output_name, bool value) { bool_streams_.Output(output_name, batch_size_, current_entry_, value); } public void OutputBools(string output_name, bool[] values) { bool_streams_.Output(output_name, batch_size_, current_entry_, values); } public void OutputFloat(string output_name, float value) { float_streams_.Output(output_name, batch_size_, current_entry_, value); } public void OutputFloats(string output_name, float[] values) { float_streams_.Output(output_name, batch_size_, current_entry_, values); } public void OutputInt(string output_name, int value) { 
int_streams_.Output(output_name, batch_size_, current_entry_, value); } public void OutputInts(string output_name, int[] values) { int_streams_.Output(output_name, batch_size_, current_entry_, values); } public void BuildResponseStreams(Orrb.RenderBatchResponse response) { foreach (KeyValuePair<string, bool[]> bool_stream in bool_streams_.GetStreams()) { Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry response_bool_stream = new Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry(); response_bool_stream.Name = bool_stream.Key; response_bool_stream.Data.AddRange(bool_stream.Value); response.AuxiliaryBoolStreams.Add(response_bool_stream); } foreach (KeyValuePair<string, int[]> int_stream in int_streams_.GetStreams()) { Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry response_int_stream = new Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry(); response_int_stream.Name = int_stream.Key; response_int_stream.Data.AddRange(int_stream.Value); response.AuxiliaryIntStreams.Add(response_int_stream); } foreach (KeyValuePair<string, float[]> float_stream in float_streams_.GetStreams()) { Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry response_float_stream = new Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry(); response_float_stream.Name = float_stream.Key; response_float_stream.Data.AddRange(float_stream.Value); response.AuxiliaryFloatStreams.Add(response_float_stream); } } } // This workload runs the work triggered by a RenderBatch RPC. 
// One queued workload per RenderBatch RPC. Renders every requested state
// (entry), then packages the captured images into a RenderBatchResponse and
// completes the RPC promise.
private class RenderBatchWorkload : QueuedWorkloadRequest<Orrb.RenderBatchRequest, Orrb.RenderBatchResponse>,
                                    IRenderServerWorkload, IImageBatchConsumer {

    private float start_time_ = 0.0f;                   // Wall-clock start, for throughput logging.
    private int current_batch_entry_ = 0;               // Index of the next request entry to render.
    private BatchOutputContext output_context_ = null;  // Collects auxiliary (non-image) outputs per entry.
    private bool done_ = false;

    public RenderBatchWorkload(RenderServer server, Orrb.RenderBatchRequest request) : base(server, request) { }

    // Validate the requested cameras and prime the recorder's capture buffers.
    // On a missing camera the workload is marked done immediately (the RPC
    // promise is then never resolved by this path).
    public void InitializeWorkload() {
        Logger.Info("RenderBatchWorkload::InitializeWorkload::New render request.");
        start_time_ = Time.realtimeSinceStartup;
        output_context_ = new BatchOutputContext(request_.Entries.Count);
        current_batch_entry_ = 0;
        List<Camera> cameras = server_.scene_instance_.GetCameras(request_.CameraNames);
        // Make sure we can find all the requested cameras in the scene.
        if (cameras.Count != request_.CameraNames.Count) {
            Logger.Error("RenderServer::RenderBatchWorkload::InitializeWorkload::Cannot find all requested cameras.");
            done_ = true;
            return;
        }
        // Prepare the recorder, so that it has buffers ready.
        server_.recorder_.ResetBatch(cameras, request_.Entries.Count, request_.Width, request_.Height,
                                     request_.RenderAlpha, request_.RenderDepth, request_.RenderNormals,
                                     request_.RenderSegmentation);
    }

    // Render one state (frame). Called once per entry by the server loop.
    public void ProcessWorkload() {
        // Deterministic seeding: either a per-entry seed from the request, or
        // the batch seed offset by the entry index.
        int seed = request_.BatchSeed + current_batch_entry_;
        if (request_.UseEntrySeeds) {
            seed = request_.Entries[current_batch_entry_].Seed;
        }
        UnityEngine.Random.InitState(seed);
        server_.scene_instance_.UpdateState(request_.Entries[current_batch_entry_].Qpos);
        server_.scene_instance_.GetComponentManager().RunComponents(output_context_);
        output_context_.Advance();
        server_.recorder_.Capture();
        current_batch_entry_++;
    }

    public bool WorkloadDone() {
        return done_;
    }

    // The RenderBatchWorkload is also an ImageBatchConsumer, when the
    // Recorder is done it will send the batch here (through the RenderServer).
    public void ConsumeImageBatch(RenderBatch batch) {
        Orrb.RenderBatchResponse response = new Orrb.RenderBatchResponse();
        // Build the GRPC response from the recorded images...
        int frames = 0;
        foreach (KeyValuePair<string, RenderBatch.CameraBatch> pair in batch.camera_batches_) {
            Tuple<int, StreamEntry> stream_info = StreamFromBatch(pair.Key, pair.Value);
            response.Streams.Add(stream_info.Item2);
            frames += stream_info.Item1;
        }
        // ... and the auxiliary outputs.
        output_context_.BuildResponseStreams(response);
        float delta_time = Time.realtimeSinceStartup - start_time_;
        // Resolving the promise unblocks the gRPC handler thread.
        response_promise_.SetResult(response);
        Logger.Info("RenderBatchWorkload::ConsumeImageBatch::Batch finished: {0} images in {1} ({2}).",
                    frames, delta_time, frames / delta_time);
        done_ = true;
    }

    // Convert one camera's captured textures into a response stream.
    // Returns (frame count, stream); entries are created lazily so that the
    // several render types (RGB/depth/...) of frame i share one entry.
    private static Tuple<int, StreamEntry> StreamFromBatch(string name, RenderBatch.CameraBatch batch_stream) {
        StreamEntry stream = new StreamEntry();
        stream.Name = name;
        int count = 0;
        foreach (KeyValuePair<RenderBatch.CameraBatch.RenderType, List<Texture2D>> pair in batch_stream.images_) {
            int i = 0;
            foreach (Texture2D image in pair.Value) {
                BatchResponseEntry entry;
                // Reuse the entry created by a previously processed render
                // type for this frame index, or append a new one.
                if (i + 1 > stream.Entries.Count) {
                    entry = new BatchResponseEntry();
                    stream.Entries.Add(entry);
                } else {
                    entry = stream.Entries[i];
                }
                ++i;
                ByteString data = null;
                switch (pair.Key) {
                    case RenderBatch.CameraBatch.RenderType.RGB:
                        data = ByteString.CopyFrom(image.GetRawTextureData());
                        entry.ImageData = data;
                        break;
                    case RenderBatch.CameraBatch.RenderType.DEPTH:
                        entry.DepthData = ReadDepth(image);
                        break;
                    case RenderBatch.CameraBatch.RenderType.NORMALS:
                        entry.NormalsData = ReadNormals(image);
                        break;
                    case RenderBatch.CameraBatch.RenderType.SEGMENTATION:
                        entry.SegmentationData = ReadSegmentation(image);
                        break;
                    default:
                        Logger.Error("Unsupported Batch.Stream.Type {0}", pair.Key);
                        break;
                }
            }
            count = i;
        }
        Tuple<int, StreamEntry> stream_info = new Tuple<int, StreamEntry>(count, stream);
        return stream_info;
    }

    // Read depth from RGBAFloat texture where it's stored in the R channel.
    // Each pixel is 16 bytes (4 x float32); copy only the first 4 (R).
    private static ByteString ReadDepth(Texture2D texture) {
        int size = texture.width * texture.height;
        byte[] depth_array = new byte[size * 4]; // float32, so 4 bytes
        byte[] texture_array = texture.GetRawTextureData();
        for (int i = 0; i < size; ++i) {
            for (int j = 0; j < 4; ++j) {
                depth_array[i * 4 + j] = texture_array[i * 16 + j];
            }
        }
        return ByteString.CopyFrom(depth_array);
    }

    // Read surface normals from RGBAFloat texture where they're stored in the
    // GBA channels (bytes 4..15 of each 16-byte pixel; R holds depth).
    private static ByteString ReadNormals(Texture2D texture) {
        int size = texture.width * texture.height;
        byte[] normals_array = new byte[size * 3 * 4]; // 3 times float32, so 3 * 4 bytes
        byte[] texture_array = texture.GetRawTextureData();
        for (int i = 0; i < size; ++i) {
            for (int j = 0; j < 12; ++j) {
                // offset by 4 since first channel is depth
                normals_array[i * 12 + j] = texture_array[i * 16 + 4 + j];
            }
        }
        return ByteString.CopyFrom(normals_array);
    }

    // Read the segmentation id from the first byte (R) of each 3-byte RGB pixel.
    private static ByteString ReadSegmentation(Texture2D texture) {
        int size = texture.width * texture.height;
        byte[] segmentation_array = new byte[size];
        byte[] texture_array = texture.GetRawTextureData();
        for (int i = 0; i < size; ++i) {
            segmentation_array[i] = texture_array[i * 3]; // 3 RGB channels
        }
        return ByteString.CopyFrom(segmentation_array);
    }
}

// This instant workload updates the RenderComponentConfigs of
// Components in the ComponentManager.
private class UpdateWorkload : QueuedWorkloadRequest<Orrb.UpdateRequest, Orrb.UpdateResponse>,
                               IRenderServerWorkload {

    public UpdateWorkload(RenderServer server, Orrb.UpdateRequest request) : base(server, request) { }

    // Nothing to prepare; all the work happens in one ProcessWorkload call.
    public void InitializeWorkload() { }

    // Apply every renderer component config from the request, log the
    // resulting aggregate config as JSON, and complete the RPC.
    public void ProcessWorkload() {
        ComponentManager manager = server_.scene_instance_.GetComponentManager();
        foreach (Orrb.RendererComponent config in request_.Components) {
            manager.UpdateComponent(config.Name, config.Config);
        }
        JsonFormatter formatter = new JsonFormatter(JsonFormatter.Settings.Default);
        Logger.Info("UpdateWorkload::ProcessWorkload::New config after update: {0}",
                    formatter.Format(manager.GetConfig()));
        response_promise_.SetResult(new Orrb.UpdateResponse());
    }

    // Single-shot workload: done right after the first ProcessWorkload.
    public bool WorkloadDone() {
        return true;
    }
}

// GRPC RenderService implementation, just a proxy that delegates
// to the parent RenderServer class. Handlers run on gRPC threads and
// only enqueue work; the returned Task completes when the main loop
// resolves the workload's promise.
private class RenderServiceImpl : Orrb.RenderService.RenderServiceBase {

    private RenderServer server_ = null;

    public RenderServiceImpl(RenderServer server) {
        server_ = server;
    }

    public override Task<Orrb.RenderBatchResponse> RenderBatch(Orrb.RenderBatchRequest request,
                                                               ServerCallContext context) {
        RenderBatchWorkload workload = new RenderBatchWorkload(server_, request);
        server_.EnqueueWorkload(workload);
        return workload.response_promise_.Task;
    }

    public override Task<Orrb.UpdateResponse> Update(Orrb.UpdateRequest request, ServerCallContext context) {
        UpdateWorkload workload = new UpdateWorkload(server_, request);
        server_.EnqueueWorkload(workload);
        return workload.response_promise_.Task;
    }
}

// Tunables exposed as command-line flags (see Flags.InitFlags in Start).
[SerializeField]
[Flag]
public int queues_count_ = 4;     // gRPC completion queue count.

[SerializeField]
[Flag]
public int workers_count_ = 4;    // gRPC thread pool size.

[SerializeField]
[Flag]
public int streams_count_ = 4;    // Max concurrent gRPC streams.

[SerializeField]
[Flag]
public int port_ = 6666;

[SerializeField]
[Flag]
public string host_ = "[::]";     // Bind address; "[::]" means all interfaces.

private Recorder recorder_ = null;
private SceneInstance scene_instance_ = null;
private RenderServiceImpl render_service_ = null;
private Server server_ = null;
// Workload currently being processed by the main loop (null when idle).
private IRenderServerWorkload current_workload_ = null;
// Pending workloads; also used as the lock / wait handle between the
// gRPC handler threads (producers) and the main loop (consumer).
private Queue<IRenderServerWorkload> queue_ = new Queue<IRenderServerWorkload>();

// Use this for initialization
void Start() {
    Flags.InitFlags(this, "render_server");
}

// Configure the gRPC environment and start the server. Returns false if
// binding/starting the server fails (e.g. port already in use).
public bool Initialize(Recorder recorder, SceneInstance scene_instance) {
    recorder_ = recorder;
    scene_instance_ = scene_instance;
    GrpcEnvironment.SetThreadPoolSize(workers_count_);
    GrpcEnvironment.SetCompletionQueueCount(queues_count_);
    render_service_ = new RenderServiceImpl(this);
    // Port reuse is turned off, it was a frequent cause of hard
    // to debug complications.
    server_ = new Server(
        new[] {
            new ChannelOption(ChannelOptions.SoReuseport, 0),
            new ChannelOption(ChannelOptions.MaxConcurrentStreams, streams_count_)
        }) {
        Services = { Orrb.RenderService.BindService(render_service_) },
        Ports = { new ServerPort(host_, port_, ServerCredentials.Insecure) }
    };
    try {
        server_.Start();
    } catch (IOException e) {
        Logger.Error("RenderServer::Initialize::Failed to initialize render server on: {0}:{1} ({2})",
                     host_, port_, e.Message);
        return false;
    }
    Logger.Info("RenderServer::Initialize::Initialized render server on: {0}:{1}", host_, port_);
    return true;
}

// Main server loop. Process current workload, or if it is done
// try to get a next one.
public void ProcessRequests() {
    if (current_workload_ != null) {
        ProcessCurrentWorkload();
    } else {
        IRenderServerWorkload next_workload = GetNextWorkload();
        if (next_workload != null) {
            InitializeNewWorkload(next_workload);
            ProcessCurrentWorkload();
        }
    }
}

// The GRPC service will use this to enqueue and notify the main
// loop of new incoming work.
private void EnqueueWorkload(IRenderServerWorkload workload) {
    lock (queue_) {
        queue_.Enqueue(workload);
        Monitor.Pulse(queue_);
    }
}

// The main loop will use this to pull a new workload from the queue,
// or wait (in a blocking fashion) till next one comes.
private IRenderServerWorkload GetNextWorkload() { lock (queue_) { int retries = 10; while (retries-- > 0) { if (queue_.Count > 0) { return queue_.Dequeue(); } // Yield on the queue, up to 100ms, this reduces idle // load when no work is pending. Monitor.Wait(queue_, 100); } } return null; } private void InitializeNewWorkload(IRenderServerWorkload new_workload) { current_workload_ = new_workload; current_workload_.InitializeWorkload(); } private void ProcessCurrentWorkload() { current_workload_.ProcessWorkload(); if (current_workload_.WorkloadDone()) { current_workload_ = null; } } // The RenderServer is a ImageBatchConsumer, when the Recorder is done // it will send a batch here. Pass it to the current workload if it is // a ImageBatchConsumer too. public void ConsumeImageBatch(RenderBatch batch) { if (current_workload_ != null && current_workload_ is IImageBatchConsumer) { (current_workload_ as IImageBatchConsumer).ConsumeImageBatch(batch); if (current_workload_.WorkloadDone()) { current_workload_ = null; } } else { Logger.Warning("RenderServer::ConsumeImageBatch::Unexpected image batch consume call."); } } }
509
orrb
openai
C#
using System.Collections.Generic; using UnityEngine; using System.Xml; using System.IO; using Parabox.STL; // This monstrosity loads a MuJoCo XML and rebuilds the robot. // It loads the required geoms from STL files and generates the // geometric primitives necessary. It recreates the joints, geometry, // cameras and sites defined in the XML. MuJoCo class inheritance // is supported for those entities. Far from everything in the MuJoCo // modelling reference is implemented. public class RobotLoader : KineticHierarchyController { public struct MeshDefinition { public string name; public Vector3 scale; public byte[] contents; public MeshDefinition(string name, Vector3 scale, byte[] contents) { this.name = name; this.scale = scale; this.contents = contents; } } public struct TextureDefinition { public string name; public byte[] contents; public TextureDefinition(string name, byte[] contents) { this.name = name; this.contents = contents; } } [SerializeField] public GameObject assembly_parts_ = null; [SerializeField] public Material default_material_ = null; [SerializeField] public bool convexify_meshes_ = false; private Dictionary<string, Material> materials_ = new Dictionary<string, Material>(); private int part_counter_ = 0; private int camera_counter_ = 0; private string asset_basedir_ = "."; private string mesh_dir_ = "."; private string texture_dir_ = "."; private List<string> geom_categories_ = new List<string>(); private GameObject mr_robot_ = null; // Main entry point, use the provided directory as base for // relative paths. 
// Convenience overload: load a robot XML with no extra in-memory assets.
public bool LoadRobot(string xml_file, string asset_basedir) {
    return LoadRobot(xml_file, asset_basedir, new List<TextureDefinition>(), new List<MeshDefinition>());
}

// Full entry point: parse the MuJoCo XML (resolving <include>s), preload the
// referenced assets plus the ones passed in directly, and rebuild the robot
// hierarchy under this controller. Returns false on any unrecoverable error.
public bool LoadRobot(string xml_file, string asset_basedir, IList<TextureDefinition> textures,
                      IList<MeshDefinition> meshes) {
    this.asset_basedir_ = asset_basedir;
    xml_file = ConfigUtils.ResolveFile(asset_basedir_, xml_file);
    XmlDocument mr_robot_xml = new XmlDocument();
    mr_robot_xml.Load(xml_file);
    // If the XML has includes, merge everything into one.
    ResolveXmlIncludes(Path.GetDirectoryName(xml_file), mr_robot_xml);
    XmlNode mujoco = XmlUtils.GetChildNode(mr_robot_xml, "mujoco");
    if (mujoco == null) {
        Logger.Error("RobotLoader::LoadRobot::Cannot find mujoco node.");
        return false;
    }
    // Set up global properties (mesh / texture directories).
    HandleCompilerDirectives(mujoco);
    // Preload textures and geom meshes.
    if (!PrepareAssets(mr_robot_xml, textures, meshes)) {
        Logger.Error("RobotLoader::LoadRobot::Failed to prepare assets.");
        return false;
    }
    XmlUtils.Defaults defaults = new XmlUtils.Defaults(XmlUtils.GetChildNode(mujoco, "default"));
    XmlNode worldbody = XmlUtils.GetChildNode(mujoco, "worldbody");
    if (worldbody == null) {
        Logger.Error("RobotLoader::LoadRobot::No worldbody defined.");
        return false;
    }
    // Prepare geom names for semantic segmentation; sorted so that category
    // ids are stable regardless of XML ordering.
    PrepareGeomCategories(worldbody);
    geom_categories_.Sort();
    if (!AddRobotBody(this, worldbody, defaults)) {
        Logger.Error("RobotLoader::LoadRobot::Failed to build robot parts.");
        return false;
    }
    name = "robot";
    // MuJoCo has a different world orientation than Unity; mirror the X axis.
    transform.localScale = new Vector3(-transform.localScale.x, transform.localScale.y, transform.localScale.z);
    return true;
}

// Depth-first collect the distinct geom categories (for semantic
// segmentation) from the worldbody subtree.
private void PrepareGeomCategories(XmlNode node) {
    List<XmlNode> geom_nodes = XmlUtils.GetChildNodes(node, "geom");
    foreach (XmlNode geom_node in geom_nodes) {
        string geom_category = GeomController.GetGeomCategoryFromXml(geom_node);
        if (geom_category != null && !geom_categories_.Contains(geom_category)) {
            geom_categories_.Add(geom_category);
        }
    }
    List<XmlNode> body_nodes = XmlUtils.GetChildNodes(node, "body");
    foreach (XmlNode body_node in body_nodes) {
        PrepareGeomCategories(body_node);
    }
}

// Walk the whole document and apply <compiler> directives (asset directories).
private void HandleCompilerDirectives(XmlNode node) {
    foreach (XmlNode child_node in node.ChildNodes) {
        HandleCompilerDirectives(child_node);
    }
    List<XmlNode> compiler_directives = XmlUtils.GetChildNodes(node, "compiler");
    foreach (XmlNode compiler_directive in compiler_directives) {
        mesh_dir_ = XmlUtils.GetString(compiler_directive, "meshdir", mesh_dir_);
        texture_dir_ = XmlUtils.GetString(compiler_directive, "texturedir", texture_dir_);
    }
}

// Load assets passed directly in code first, then the ones declared in the XML.
private bool PrepareAssets(XmlNode node, IList<TextureDefinition> textures, IList<MeshDefinition> meshes) {
    foreach (MeshDefinition mesh in meshes) {
        if (!LoadMeshDefinition(mesh)) {
            return false;
        }
    }
    foreach (TextureDefinition texture in textures) {
        if (!LoadTextureDefinition(texture)) {
            return false;
        }
    }
    // Load assets that are defined in the XML.
    return PrepareXmlAssets(node);
}

// Import an in-memory STL and register it as a mesh asset prefab.
private bool LoadMeshDefinition(MeshDefinition mesh) {
    List<Mesh> meshes = pb_Stl_Importer.ImportBytes(mesh.contents);
    if (meshes == null) {
        // Fixed: placeholders were "{}", which is not a valid .NET composite
        // format item; use the indexed "{0}" form used everywhere else.
        Logger.Error("RobotLoader::LoadMeshDefinition::Cannot load: {0} from MeshDefinition contents.", mesh.name);
        return false;
    }
    if (meshes.Count == 0) {
        Logger.Error("RobotLoader::LoadMeshDefinition::No meshes in MeshDefinition for: {0}.", mesh.name);
        return false;
    }
    return CreateMeshAsset(assembly_parts_, mesh.name, mesh.scale, meshes);
}

// In-memory textures are currently unsupported; skipping them is not fatal.
private bool LoadTextureDefinition(TextureDefinition texture) {
    Logger.Warning("RobotLoader::LoadTextureDefinition::Ignoring texture: {0}.", texture.name);
    return true;
}

// Recursively find <asset> tags and preload the meshes and materials inside.
private bool PrepareXmlAssets(XmlNode node) {
    foreach (XmlNode child_node in node.ChildNodes) {
        if (!PrepareXmlAssets(child_node)) {
            return false;
        }
    }
    // Look in the <asset></asset> tag.
    List<XmlNode> asset_nodes = XmlUtils.GetChildNodes(node, "asset");
    foreach (XmlNode asset_node in asset_nodes) {
        List<XmlNode> mesh_nodes = XmlUtils.GetChildNodes(asset_node, "mesh");
        foreach (XmlNode mesh_node in mesh_nodes) {
            if (!LoadXmlMeshAsset(mesh_node)) {
                return false;
            }
        }
        List<XmlNode> material_nodes = XmlUtils.GetChildNodes(asset_node, "material");
        foreach (XmlNode material_node in material_nodes) {
            if (!LoadXmlMaterialAsset(material_node)) {
                return false;
            }
        }
    }
    return true;
}

// Build a Unity material from a <material> asset node, cloning the default
// material and applying color / specular / glossiness / emission attributes
// (each only when the shader exposes the matching property).
private bool LoadXmlMaterialAsset(XmlNode material_node) {
    string material_name = XmlUtils.GetString(material_node, "name", null);
    if (material_name == null) {
        Logger.Error("RobotLoader::LoadXmlMaterialAsset::Missing material name.");
        return false;
    }
    Material material = new Material(default_material_);
    material.name = material_name;
    if (material.HasProperty("_Color")) {
        material.color = XmlUtils.GetColor(material_node, "rgba", Color.grey);
    }
    if (material.HasProperty("_SpecColor")) {
        material.SetColor("_SpecColor", XmlUtils.GetColor(material_node, "specular_rgba", Color.grey));
    }
    if (material.HasProperty("_Glossiness")) {
        material.SetFloat("_Glossiness", XmlUtils.GetFloat(material_node, "shininess", 0.5f));
    }
    if (material.HasProperty("_Metallic")) {
        material.SetFloat("_Metallic", XmlUtils.GetFloat(material_node, "specular", 0.5f));
    }
    // If the XML contains the emission parameter for this material,
    // turn emissive color on.
    float emission = XmlUtils.GetFloat(material_node, "emission", 0.0f);
    if (emission > 0.001f && material.HasProperty("_EmissionColor")) {
        Color finalEmission = emission * XmlUtils.GetColor(material_node, "rgba", Color.white);
        material.EnableKeyword("_EMISSION");
        material.globalIlluminationFlags = MaterialGlobalIlluminationFlags.RealtimeEmissive;
        material.SetColor("_EmissionColor", finalEmission);
    }
    // NOTE: duplicate material names will throw here (Dictionary.Add);
    // kept as-is since a duplicate indicates a malformed asset section.
    materials_.Add(material_name, material);
    return true;
}

// Load an STL file referenced by a <mesh> asset node and register it as a
// mesh asset prefab.
private bool LoadXmlMeshAsset(XmlNode mesh_node) {
    string mesh_name = XmlUtils.GetString(mesh_node, "name", null);
    if (mesh_name == null) {
        Logger.Error("RobotLoader::LoadXmlMeshAsset::Missing mesh name.");
        return false;
    }
    string mesh_file = XmlUtils.GetString(mesh_node, "file", null);
    if (mesh_file == null) {
        // Fixed: "{}" placeholder replaced with indexed "{0}".
        Logger.Error("RobotLoader::LoadXmlMeshAsset::Missing mesh file for: {0}.", mesh_name);
        return false;
    }
    string mesh_path = ConfigUtils.ResolveFile(asset_basedir_, Path.Combine(mesh_dir_, mesh_file));
    // Import STL for this mesh. Unity has a 64k vertices per mesh
    // limit (in order to fit indexes in short) so one STL might
    // produce multiple Meshes.
    IList<Mesh> meshes = pb_Stl_Importer.Import(mesh_path);
    // Fixed: validate the import result BEFORE using it; previously a null
    // return crashed inside the convexify loop before the null check ran.
    if (meshes == null) {
        Logger.Error("RobotLoader::LoadXmlMeshAsset::Cannot load: {0} from: {1}.", mesh_name, mesh_path);
        return false;
    }
    if (meshes.Count == 0) {
        Logger.Error("RobotLoader::LoadXmlMeshAsset::No meshes in: {0} for: {1}.", mesh_path, mesh_name);
        return false;
    }
    // We can globally turn convexification on. MuJoCo convexifies all
    // geoms for collision detection, this mode can help debug / inspect it.
    if (convexify_meshes_) {
        List<Mesh> convex_meshes = new List<Mesh>();
        foreach (Mesh mesh in meshes) {
            convex_meshes.Add(ConvexHull.CreateConvexHull(mesh));
        }
        meshes = convex_meshes;
    }
    Vector3 scale = XmlUtils.GetVector3(mesh_node, "scale", Vector3.one);
    return CreateMeshAsset(assembly_parts_, mesh_name, scale, meshes);
}

// Take the mesh and its name and put the prefab asset in a proper place in
// the prefab hierarchy. Names are hierarchical, ':'-separated; intermediate
// nodes are created on demand and the leaf receives the mesh children.
private bool CreateMeshAsset(GameObject parent, string asset_name, Vector3 scale, IList<Mesh> meshes) {
    string[] split = asset_name.Split(new char[] { ':' }, 2, System.StringSplitOptions.RemoveEmptyEntries);
    Transform asset_prefab_transform = parent.transform.Find(split[0]);
    GameObject asset_prefab = null;
    if (asset_prefab_transform != null) {
        asset_prefab = asset_prefab_transform.gameObject;
    }
    if (asset_prefab == null) {
        asset_prefab = new GameObject(split[0]);
        asset_prefab.transform.parent = parent.transform;
        asset_prefab.transform.localPosition = Vector3.zero;
        asset_prefab.transform.localRotation = Quaternion.identity;
        asset_prefab.transform.localScale = Vector3.one;
        asset_prefab.AddComponent<GeomController>();
    }
    if (split.Length == 2) {
        // Descend into the remainder of the hierarchical name.
        return CreateMeshAsset(asset_prefab, split[1], scale, meshes);
    } else {
        asset_prefab.transform.localScale = scale;
        int counter = 0;
        foreach (Mesh mesh in meshes) {
            GameObject asset_mesh = new GameObject(string.Format("{0}_mesh_{1}", asset_name, counter++));
            asset_mesh.transform.parent = asset_prefab.transform;
            asset_mesh.transform.localPosition = Vector3.zero;
            asset_mesh.transform.localRotation = Quaternion.identity;
            asset_mesh.transform.localScale = Vector3.one;
            asset_mesh.AddComponent<MeshFilter>().sharedMesh = mesh;
            asset_mesh.AddComponent<MeshRenderer>().sharedMaterial = default_material_;
        }
        return true;
    }
}

// Recursively splice the contents of <include> files (which must wrap their
// content in <mujocoinclude>) into the including node.
private void ResolveXmlIncludes(string root_dir, XmlNode node) {
    foreach (XmlNode child in node.ChildNodes) {
        ResolveXmlIncludes(root_dir, child);
    }
    List<XmlNode> includes = XmlUtils.GetChildNodes(node, "include");
    foreach (XmlNode include in includes) {
        XmlDocument loaded_include = LoadInclude(root_dir, include);
        if (loaded_include == null) {
            Logger.Error("RobotLoader::ResolveXMLIncludes::Cannot load include.");
            continue;
        }
        node.RemoveChild(include);
        XmlNode mujoco_root_node = XmlUtils.GetChildNode(loaded_include, "mujocoinclude");
        if (mujoco_root_node == null) {
            Logger.Error("RobotLoader::ResolveXmlIncludes::Included file needs a mujocoinclude node.");
            continue;
        }
        foreach (XmlNode included_child in mujoco_root_node.ChildNodes) {
            XmlNode imported_child = node.OwnerDocument.ImportNode(included_child, true);
            node.AppendChild(imported_child);
        }
    }
}

// Load one included XML file (itself resolving nested includes).
private XmlDocument LoadInclude(string root_dir, XmlNode include_node) {
    string file = XmlUtils.GetString(include_node, "file", null);
    if (file == null) {
        Logger.Error("RobotLoader::LoadInclude::Missing file attribute in include node.");
        return null;
    }
    XmlDocument included_document = new XmlDocument();
    included_document.Load(Path.Combine(root_dir, file));
    ResolveXmlIncludes(root_dir, included_document);
    return included_document;
}

// Get the XML with a default class for a given entity class_name.
private XmlNode FindDefault(XmlNode node, string class_name) {
    List<XmlNode> default_nodes = XmlUtils.GetChildNodes(node, "default");
    foreach (XmlNode default_node in default_nodes) {
        string default_class_name = XmlUtils.GetString(default_node, "class", null);
        if (class_name.Equals(default_class_name)) {
            return default_node;
        }
    }
    return null;
}

// Handle creation of a MuJoCo XML body / worldbody entity.
private bool AddRobotBody(KineticHierarchyController parent, XmlNode part_xml, XmlUtils.Defaults defaults) { defaults = defaults.Resolve(XmlUtils.GetString(part_xml, "childclass", null)); KineticHierarchyController robot_part_attachment = BuildRobotBodyAttachment(parent, part_xml, defaults); return BuildRobotBodyChildren(robot_part_attachment, part_xml, defaults); } // Create the hierarchy that handles local transformations // and joints for a given body. private KineticHierarchyController BuildRobotBodyAttachment(KineticHierarchyController parent, XmlNode part_xml, XmlUtils.Defaults defaults) { string part_name = XmlUtils.GetString(part_xml, "name", string.Format("part_{0}", part_counter_)); part_counter_++; // Build the body and set local position/rotation/scale relative to parent. BodyController body = SceneUtils.InstantiateWithController<BodyController>(part_name); body.Initialize(parent, part_name, XmlUtils.GetVector3(part_xml, "pos", Vector3.zero), XmlUtils.GetRotation(part_xml, Quaternion.identity)); List<XmlNode> joint_nodes = XmlUtils.GetChildNodes(part_xml, "joint"); // Add all the joints in a hierarchy, one after another // (XML order is important). KineticHierarchyController last_game_object = body; for (int i = 0; i < joint_nodes.Count; ++i) { last_game_object = BuildJoint(last_game_object, part_name, i, joint_nodes[i], defaults); } return last_game_object; } // Recursively add geoms, cameras, sites and other bodies. 
// Attach everything a <body> contains to its attachment point: geoms,
// cameras, sites and (recursively) nested bodies. Returns false as soon as
// any child fails to build.
private bool BuildRobotBodyChildren(KineticHierarchyController parent, XmlNode part_xml,
                                    XmlUtils.Defaults defaults) {
    List<XmlNode> geom_nodes = XmlUtils.GetChildNodes(part_xml, "geom");
    foreach (XmlNode geom_node in geom_nodes) {
        if (!AddRobotGeom(parent, geom_node, defaults)) {
            Logger.Error("RobotLoader::BuildRobotBodyChildren::Cannot add robot geom.");
            return false;
        }
    }
    List<XmlNode> camera_nodes = XmlUtils.GetChildNodes(part_xml, "camera");
    foreach (XmlNode camera_node in camera_nodes) {
        if (!AddCamera(parent, camera_node, defaults)) {
            Logger.Error("RobotLoader::BuildRobotBodyChildren::Cannot add robot camera.");
            return false;
        }
    }
    List<XmlNode> site_nodes = XmlUtils.GetChildNodes(part_xml, "site");
    foreach (XmlNode site_node in site_nodes) {
        if (!AddRobotSite(parent, site_node, defaults)) {
            Logger.Error("RobotLoader::BuildRobotBodyChildren::Cannot add robot site.");
            return false;
        }
    }
    List<XmlNode> child_nodes = XmlUtils.GetChildNodes(part_xml, "body");
    foreach (XmlNode child_node in child_nodes) {
        if (!AddRobotBody(parent, child_node, defaults)) {
            Logger.Error("RobotLoader::BuildRobotBodyChildren::Cannot add robot body.");
            return false;
        }
    }
    return true;
}

// Create one JointController from a <joint> node, parented under the
// previous element of the body's joint chain. Position / rotation / axis /
// range / type come from the node with "joint" subclass defaults applied.
private KineticHierarchyController BuildJoint(KineticHierarchyController parent, string part_name, int id,
                                              XmlNode joint_xml, XmlUtils.Defaults defaults) {
    string joint_name = XmlUtils.GetString(joint_xml, "name", string.Format("{0}_joint_{1}", part_name, id));
    XmlUtils.Defaults joint_defaults = defaults.GetSubclass("joint");
    JointController joint = SceneUtils.InstantiateWithController<JointController>(joint_name);
    joint.Initialize(parent, joint_name,
                     XmlUtils.GetVector3WithDefaults(joint_xml, joint_defaults, "pos", Vector3.zero),
                     XmlUtils.GetRotationWithDefaults(joint_xml, joint_defaults, Quaternion.identity),
                     XmlUtils.GetVector3WithDefaults(joint_xml, joint_defaults, "axis", Vector3.up),
                     XmlUtils.GetVector2WithDefaults(joint_xml, joint_defaults, "range",
                                                     new Vector2(float.MinValue, float.MaxValue)),
                     XmlUtils.GetStringWithDefaults(joint_xml, joint_defaults, "type", null));
    return joint;
}

// Instantiate a camera from a <camera> node by cloning a matching prefab
// (camera-specific if present, otherwise the generic "__camera_template").
private bool AddCamera(KineticHierarchyController parent, XmlNode camera_xml, XmlUtils.Defaults defaults) {
    string camera_name = XmlUtils.GetString(camera_xml, "name", null);
    if (camera_name == null) {
        camera_name = string.Format("camera_{0}", camera_counter_++);
    }
    Camera camera_prototype = SceneUtils.Find<Camera>(assembly_parts_, camera_name,
                                                      SceneUtils.Find<Camera>(assembly_parts_,
                                                                              "__camera_template", null));
    if (camera_prototype == null) {
        Logger.Error("RobotLoader::AddCamera::Cannot find camera prefab for: {0}", camera_name);
        return false;
    }
    Camera robot_camera = Instantiate(camera_prototype);
    robot_camera.name = camera_name;
    robot_camera.transform.parent = parent.transform;
    robot_camera.transform.localRotation = XmlUtils.GetRotation(camera_xml, Quaternion.identity);
    robot_camera.transform.localPosition = XmlUtils.GetVector3(camera_xml, "pos", Vector3.zero);
    // Re-aim the camera after placement: MuJoCo cameras look along their
    // local -Z, so point Unity's forward at that direction while keeping
    // the local up vector.
    robot_camera.transform.LookAt(
        robot_camera.transform.position + robot_camera.transform.TransformDirection(Vector3.back),
        robot_camera.transform.TransformDirection(Vector3.up));
    robot_camera.fieldOfView = XmlUtils.GetFloat(camera_xml, "fovy", robot_camera.fieldOfView);
    return true;
}

// Accessor for the built robot root.
// NOTE(review): mr_robot_ is never assigned in the visible code, so this
// appears to always return null — confirm against the rest of the class.
public GameObject GetRobot() {
    return mr_robot_;
}

// Get the camera template, that will be cloned when instantiating
// cameras in the XML.
// Look up a camera prefab by name under the assembly parts; fall back to the
// generic "__camera_template" when the named prefab is missing.
private GameObject FindCameraPrefab(string prefab_name) {
    foreach (Transform assembly_child in assembly_parts_.transform) {
        if (assembly_child.name == prefab_name) {
            return assembly_child.gameObject;
        }
    }
    if (prefab_name == "__camera_template") {
        Logger.Warning("RobotLoader::FindCameraPrefab::Cannot find default camera.");
        return null;
    } else {
        Logger.Warning("RobotLoader::FindCameraPrefab::Cannot find {0}, looking for default.", prefab_name);
        return FindCameraPrefab("__camera_template");
    }
}

// Build a <site> entity, dispatching on its type attribute ("sphere" when
// absent), then resolve its material.
private bool AddRobotSite(KineticHierarchyController parent, XmlNode site_xml, XmlUtils.Defaults defaults) {
    defaults = defaults.Resolve(XmlUtils.GetString(site_xml, "class", null)).GetSubclass("site");
    string type = XmlUtils.GetStringWithDefaults(site_xml, defaults, "type", "sphere");
    SiteController site;
    switch (type) {
        case "box":
            site = AddRobotBoxSite(parent, site_xml, defaults);
            break;
        case "plane":
            site = AddRobotPlaneSite(parent, site_xml, defaults);
            break;
        case null:
        case "sphere":
            site = AddRobotSphereSite(parent, site_xml, defaults);
            break;
        case "cylinder":
            site = AddRobotCylinderSite(parent, site_xml, defaults);
            break;
        case "capsule":
            site = AddRobotCapsuleSite(parent, site_xml, defaults);
            break;
        default:
            Logger.Error("RobotLoader::AddRobotSite::Unsupported site type: {0} in {1}.", type, site_xml.OuterXml);
            return false;
    }
    if (site == null) {
        Logger.Error("RobotLoader::AddRobotSite::Cannot instantiate site.");
        return false;
    }
    ResolveMaterial(site_xml, defaults, site);
    return true;
}

// Box site: MuJoCo sizes are half-extents, Unity wants full extents.
private SiteController AddRobotBoxSite(KineticHierarchyController parent, XmlNode site_xml,
                                       XmlUtils.Defaults defaults) {
    string site_name = XmlUtils.GetString(site_xml, "name", null);
    Vector3 position = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "pos", Vector3.zero);
    Quaternion orientation = XmlUtils.GetRotationWithDefaults(site_xml, defaults, Quaternion.identity);
    Vector3 half_extents = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "size", Vector3.one);
    return SiteController.CreateBox(parent, site_name, position, orientation, 2 * half_extents);
}

// Sphere site: only the first size component (the radius) is used.
private SiteController AddRobotSphereSite(KineticHierarchyController parent, XmlNode site_xml,
                                          XmlUtils.Defaults defaults) {
    string site_name = XmlUtils.GetString(site_xml, "name", null);
    Vector3 position = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "pos", Vector3.zero);
    Quaternion orientation = XmlUtils.GetRotationWithDefaults(site_xml, defaults, Quaternion.identity);
    Vector3 size = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "size", Vector3.zero);
    return SiteController.CreateSphere(parent, site_name, position, orientation, Vector3.one * 2.0f * size.x);
}

// Plane site: rotate the Unity plane to the MuJoCo convention and rescale
// (Unity's built-in plane is 10x10 units, hence the /10).
private SiteController AddRobotPlaneSite(KineticHierarchyController parent, XmlNode site_xml,
                                         XmlUtils.Defaults defaults) {
    string site_name = XmlUtils.GetString(site_xml, "name", null);
    Vector3 position = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "pos", Vector3.zero);
    Quaternion orientation = XmlUtils.GetRotationWithDefaults(site_xml, defaults, Quaternion.identity)
                             * Quaternion.Euler(90.0f, 0.0f, 0.0f);
    Vector3 mujoco_size = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "size", Vector3.one);
    Vector3 plane_scale = 2 * new Vector3(mujoco_size.x / 10.0f, 0.0f, mujoco_size.y / 10.0f);
    return SiteController.CreatePlane(parent, site_name, position, orientation, plane_scale);
}

// Cylinder site: MuJoCo size is (radius, half-height); Unity's cylinder is
// 2 units tall at scale 1, hence the /2 on the height component.
private SiteController AddRobotCylinderSite(KineticHierarchyController parent, XmlNode site_xml,
                                            XmlUtils.Defaults defaults) {
    string site_name = XmlUtils.GetString(site_xml, "name", null);
    Vector3 position = XmlUtils.GetVector3WithDefaults(site_xml, defaults, "pos", Vector3.zero);
    Quaternion orientation = XmlUtils.GetRotationWithDefaults(site_xml, defaults, Quaternion.identity);
    Vector2 mujoco_size = XmlUtils.GetVector2WithDefaults(site_xml, defaults, "size", Vector2.one);
    Vector3 cylinder_scale = 2 * new Vector3(mujoco_size.x, mujoco_size.y / 2.0f, mujoco_size.x);
    return SiteController.CreateCylinder(parent, site_name, position, orientation, cylinder_scale);
}

// A capsule can be defined with a from-to pair, which
// complicates the code a little.
// Build a capsule <site>. Supports both the pos/size form and the "fromto"
// form (two endpoints from which center, length and orientation derive).
private SiteController AddRobotCapsuleSite(KineticHierarchyController parent, XmlNode site_xml,
                                           XmlUtils.Defaults defaults) {
    string capsule_name = XmlUtils.GetString(site_xml, "name", null);
    if (XmlUtils.HasAttribute(site_xml, "fromto")) {
        string from_to = XmlUtils.GetStringWithDefaults(site_xml, defaults, "fromto", "0 0 0 0 0 0");
        string[] from_to_split = from_to.Split(new char[] { ' ' }, System.StringSplitOptions.RemoveEmptyEntries);
        if (from_to_split.Length != 6) {
            // Fixed: this message previously claimed to come from AddRobotCapsuleGeom.
            Logger.Error("RobotLoader::AddRobotCapsuleSite::Malformed fromto: {0}", from_to);
            return null;
        }
        // Fixed: MuJoCo XML always uses '.' as the decimal separator, so parse
        // culture-invariantly; the previous current-culture parse broke under
        // comma-decimal system locales.
        Vector3 from_position = new Vector3(
            float.Parse(from_to_split[0], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(from_to_split[1], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(from_to_split[2], System.Globalization.CultureInfo.InvariantCulture));
        Vector3 to_position = new Vector3(
            float.Parse(from_to_split[3], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(from_to_split[4], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(from_to_split[5], System.Globalization.CultureInfo.InvariantCulture));
        Vector3 center = (from_position + to_position) / 2.0f;
        float half_length = (to_position - from_position).magnitude / 2.0f;
        Quaternion rotation = Quaternion.LookRotation(to_position - from_position);
        return SiteController.CreateCapsule(parent, capsule_name, center, rotation, half_length,
                                            XmlUtils.GetFloat(site_xml, "size", 1.0f));
    } else {
        // size is (radius, half-length) in MuJoCo order.
        Vector2 mujoco_size = XmlUtils.GetVector2WithDefaults(site_xml, defaults, "size", Vector2.one);
        return SiteController.CreateCapsule(parent, capsule_name,
                                            XmlUtils.GetVector3WithDefaults(site_xml, defaults, "pos",
                                                                            Vector3.zero),
                                            XmlUtils.GetRotationWithDefaults(site_xml, defaults,
                                                                             Quaternion.identity),
                                            mujoco_size.y, mujoco_size.x);
    }
}

// Build a <geom> entity: a preloaded mesh when the "mesh" attribute is set,
// otherwise a primitive dispatched on the type attribute (sphere default).
// Afterwards assign its segmentation category and material.
private bool AddRobotGeom(KineticHierarchyController parent, XmlNode geom_xml, XmlUtils.Defaults defaults) {
    defaults = defaults.Resolve(XmlUtils.GetString(geom_xml, "class", null)).GetSubclass("geom");
    GeomController geom = null;
    string mesh_name = XmlUtils.GetStringWithDefaults(geom_xml, defaults, "mesh", null);
    string type = XmlUtils.GetStringWithDefaults(geom_xml, defaults, "type", null);
    if (mesh_name != null) {
        geom = AddRobotMeshGeom(parent, geom_xml, defaults, mesh_name);
    } else if ("box".Equals(type)) {
        geom = AddRobotBoxGeom(parent, geom_xml, defaults);
    } else if ("plane".Equals(type)) {
        geom = AddRobotPlaneGeom(parent, geom_xml, defaults);
    } else if (type == null || "sphere".Equals(type)) {
        geom = AddRobotSphereGeom(parent, geom_xml, defaults);
    } else if ("cylinder".Equals(type)) {
        geom = AddRobotCylinderGeom(parent, geom_xml, defaults);
    } else if ("capsule".Equals(type)) {
        geom = AddRobotCapsuleGeom(parent, geom_xml, defaults);
    } else {
        Logger.Error("RobotLoader::AddRobotGeom::Unsupported geom type: {0} in {1}.", type, geom_xml.OuterXml);
        return false;
    }
    if (geom == null) {
        Logger.Error("RobotLoader::AddRobotGeom::Cannot instantiate geom.");
        return false;
    }
    // Set the geom category for semantic segmentation.
    UpdateGeomCategory(geom, geom_xml);
    // Find the material in the preloaded assets.
    ResolveMaterial(geom_xml, defaults, geom);
    return true;
}

// Assign the semantic segmentation category id from the sorted category list.
private void UpdateGeomCategory(GeomController geom, XmlNode geom_xml) {
    string geom_category = GeomController.GetGeomCategoryFromXml(geom_xml);
    if (geom_category != null) {
        // the background has category id = 0, hence the +1
        geom.category_id_ = geom_categories_.IndexOf(geom_category) + 1;
        geom.SetCategoryRendererProperties();
    }
}

// Box geom: MuJoCo sizes are half-extents, Unity wants full extents.
private GeomController AddRobotBoxGeom(KineticHierarchyController parent, XmlNode geom_xml,
                                       XmlUtils.Defaults defaults) {
    return GeomController.CreateBox(
        parent,
        XmlUtils.GetString(geom_xml, "name", null),
        XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "pos", Vector3.zero),
        XmlUtils.GetRotationWithDefaults(geom_xml, defaults, Quaternion.identity),
        2 * XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "size", Vector3.one));
}

// Sphere geom (also the fallback when no type attribute is present).
private GeomController AddRobotSphereGeom(KineticHierarchyController parent, XmlNode geom_xml,
                                          XmlUtils.Defaults defaults) {
    return GeomController.CreateSphere(
        parent,
        XmlUtils.GetString(geom_xml, "name", null),
        XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "pos", Vector3.zero),
        XmlUtils.GetRotationWithDefaults(geom_xml, defaults, Quaternion.identity),
        2 * XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "size", Vector3.one));
}

// Plane geom: rotate to the MuJoCo convention and rescale (Unity's built-in
// plane is 10x10 units at scale 1, hence the /10).
private GeomController AddRobotPlaneGeom(KineticHierarchyController parent, XmlNode geom_xml,
                                         XmlUtils.Defaults defaults) {
    Vector3 mujoco_size = XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "size", Vector3.one);
    return GeomController.CreatePlane(
        parent,
        XmlUtils.GetString(geom_xml, "name", null),
        XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "pos", Vector3.zero),
        XmlUtils.GetRotationWithDefaults(geom_xml, defaults, Quaternion.identity)
            * Quaternion.Euler(90.0f, 0.0f, 0.0f),
        2 * new Vector3(mujoco_size.x / 10.0f, 0.0f, mujoco_size.y / 10.0f));
}

// Cylinder geom: MuJoCo size is (radius, half-height); Unity's cylinder is
// 2 units tall at scale 1, hence the /2 on the height component.
private GeomController AddRobotCylinderGeom(KineticHierarchyController parent, XmlNode geom_xml,
                                            XmlUtils.Defaults defaults) {
    Vector2 mujoco_size = XmlUtils.GetVector2WithDefaults(geom_xml, defaults, "size", Vector2.one);
    return GeomController.CreateCylinder(
        parent,
        XmlUtils.GetString(geom_xml, "name", null),
        XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "pos", Vector3.zero),
        XmlUtils.GetRotationWithDefaults(geom_xml, defaults, Quaternion.identity),
        2 * new Vector3(mujoco_size.x, mujoco_size.y / 2.0f, mujoco_size.x));
}

// A capsule can be defined with a from-to pair, which complicates
// the code a little.
// Build a capsule geom. MuJoCo capsules come in two forms: an explicit "fromto"
// segment (two endpoints, radius in "size"), or a pos/quat plus a
// (radius, half-length) size vector. Returns null on malformed input.
private GeomController AddRobotCapsuleGeom(KineticHierarchyController parent, XmlNode geom_xml,
                                           XmlUtils.Defaults defaults) {
    string capsule_name = XmlUtils.GetString(geom_xml, "name", null);
    if (XmlUtils.HasAttribute(geom_xml, "fromto")) {
        string from_to = XmlUtils.GetStringWithDefaults(geom_xml, defaults, "fromto", "0 0 0 0 0 0");
        string[] from_to_split = from_to.Split(new char[] { ' ' }, System.StringSplitOptions.RemoveEmptyEntries);
        if (from_to_split.Length != 6) {
            Logger.Error("RobotLoader::AddRobotCapsuleGeom::Malformed fromto: {0}", from_to);
            return null;
        }
        // Parse with the invariant culture: MuJoCo XML always uses '.' as the
        // decimal separator, regardless of the machine locale. TryParse also
        // turns malformed numbers into a logged error instead of an exception.
        float[] values = new float[6];
        for (int i = 0; i < 6; ++i) {
            if (!float.TryParse(from_to_split[i], System.Globalization.NumberStyles.Float,
                                System.Globalization.CultureInfo.InvariantCulture, out values[i])) {
                Logger.Error("RobotLoader::AddRobotCapsuleGeom::Malformed fromto: {0}", from_to);
                return null;
            }
        }
        Vector3 from_position = new Vector3(values[0], values[1], values[2]);
        Vector3 to_position = new Vector3(values[3], values[4], values[5]);
        // Midpoint, half of the segment length, and an orientation that points
        // the capsule axis along the segment.
        Vector3 center = (from_position + to_position) / 2.0f;
        float half_length = (to_position - from_position).magnitude / 2.0f;
        Quaternion rotation = Quaternion.LookRotation(to_position - from_position);
        return GeomController.CreateCapsule(parent, capsule_name, center, rotation, half_length,
                                            XmlUtils.GetFloat(geom_xml, "size", 1.0f));
    } else {
        // size = (radius, half-length) in the pos/quat form.
        Vector2 mujoco_size = XmlUtils.GetVector2WithDefaults(geom_xml, defaults, "size", Vector2.one);
        return GeomController.CreateCapsule(
            parent, capsule_name,
            XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "pos", Vector3.zero),
            XmlUtils.GetRotationWithDefaults(geom_xml, defaults, Quaternion.identity),
            mujoco_size.y, mujoco_size.x);
    }
}

// Instantiate a preloaded mesh prefab as the geom and attach it to the parent
// with the pose from the XML. Returns null when the prefab is unknown.
private GeomController AddRobotMeshGeom(KineticHierarchyController parent, XmlNode geom_xml,
                                        XmlUtils.Defaults defaults, string mesh_name) {
    GeomController mesh_prefab = FindGeomPrefab(mesh_name);
    if (mesh_prefab == null) {
        Logger.Error("RobotLoader::AddRobotMeshGeom::Cannot find mesh prefab for: {0}", mesh_name);
        return null;
    }
    GeomController mesh_geom = Instantiate<GeomController>(mesh_prefab, parent.transform);
    mesh_geom.Initialize(parent, mesh_name,
                         XmlUtils.GetVector3WithDefaults(geom_xml, defaults, "pos", Vector3.zero),
                         XmlUtils.GetRotationWithDefaults(geom_xml, defaults, Quaternion.identity));
    return mesh_geom;
}

// Find the preloaded material and override with geom colors (if defined).
private void ResolveMaterial(XmlNode node_xml, XmlUtils.Defaults defaults, KineticHierarchyController mesh) {
    MeshRenderer[] renderers = mesh.GetComponentsInChildren<MeshRenderer>();
    string material_name = XmlUtils.GetStringWithDefaults(node_xml, defaults, "material", "");
    // The fallback is an empty string, so test for empty rather than null (the
    // original null check could never fail). TryGetValue avoids a double lookup.
    if (!string.IsNullOrEmpty(material_name) && materials_.TryGetValue(material_name, out var material)) {
        foreach (MeshRenderer mesh_renderer in renderers) {
            mesh_renderer.material = material;
        }
    }
    if (XmlUtils.HasAttributeWithDefaults(node_xml, defaults, "rgba")) {
        Color color = XmlUtils.GetColorWithDefaults(node_xml, defaults, "rgba", Color.white);
        foreach (MeshRenderer mesh_renderer in renderers) {
            // Only tint materials that actually expose a _Color property.
            if (mesh_renderer.material.HasProperty("_Color")) {
                mesh_renderer.material.color = color;
            }
        }
    }
}

// When a geom references a mesh, it needs to be retrieved
// and cloned from the preloaded assets.
private GeomController FindGeomPrefab(string prefab_name) {
    return FindGeomPrefabInPrefabSet(assembly_parts_.transform, prefab_name);
}

// Travel down the preloaded assets hierarchy to find the prefab mesh. Names of
// the form "set:part" recurse into the named subset; if the subset is missing,
// fall back to looking the part up at the current level.
private GeomController FindGeomPrefabInPrefabSet(Transform prefab_set, string part_name) {
    string[] split_name = part_name.Split(new char[] { ':' }, 2);
    if (split_name.Length == 2) {
        Transform prefab_sub_set = SceneUtils.Find<Transform>(prefab_set, split_name[0], null);
        if (prefab_sub_set != null) {
            return FindGeomPrefabInPrefabSet(prefab_sub_set, split_name[1]);
        } else {
            // Log tag fixed: this method is FindGeomPrefabInPrefabSet.
            Logger.Warning(
                "RobotLoader::FindGeomPrefabInPrefabSet::Cannot find: {0} subset, trying default.",
                split_name[0]);
            part_name = split_name[1];
        }
    }
    GeomController geom_prefab = SceneUtils.Find<GeomController>(prefab_set, part_name, null);
    if (geom_prefab != null) {
        return geom_prefab;
    }
    Logger.Warning("RobotLoader::FindGeomPrefabInPrefabSet::Cannot find assembly part: {0}", part_name);
    return null;
}

// Editor-only visualization: mark the loader origin with a flat red square.
void OnDrawGizmos() {
    Gizmos.color = Color.red;
    Gizmos.DrawWireCube(transform.position, new Vector3(0.3f, 0.0f, 0.3f));
}
}
841
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// This utility aggregates everything on the scene that is needed for rendering.
// It contains the RobotLoader, responsible for loading the MuJoCo XML and building
// the robot representation. It has a StateLoader that is responsible for processing
// the qpos joint position values and setting the Unity kinematic hierarchy respectively.
// Finally the ComponentManager keeps track of initialized RendererComponents, that
// perform visual domain randomization and other modifications / augmentations.
public class SceneInstance : MonoBehaviour {

    [SerializeField]
    public StateLoader state_loader_ = null;

    [SerializeField]
    public RobotLoader robot_loader_ = null;

    [SerializeField]
    public ComponentManager component_manager_ = null;

    private int id_ = -1;

    // Scene cameras indexed by name, populated in Initialize().
    private Dictionary<string, Camera> cameras_ = new Dictionary<string, Camera>();

    public void SetId(int id) {
        id_ = id;
    }

    public int GetId() {
        return id_;
    }

    // Build the robot from the MuJoCo XML, set up the qpos joint mapping and
    // cache the scene cameras. Returns false (and logs) on any failure.
    public bool Initialize(string robot_xml_path, string mapping_path, string asset_basedir) {
        gameObject.name = string.Format("Scene({0})", robot_xml_path);
        if (!robot_loader_.LoadRobot(robot_xml_path, asset_basedir)) {
            // Log tag fixed: this method is Initialize, not InitializeLocal.
            Logger.Error("SceneInstance::Initialize::Could not load robot from: {0}.", robot_xml_path);
            return false;
        }
        if (!state_loader_.Initialize(mapping_path)) {
            Logger.Error("SceneInstance::Initialize::Could not initialize state loader mappings from: {0}.",
                         mapping_path);
            return false;
        }
        foreach (Camera scene_camera in GetComponentsInChildren<Camera>()) {
            // NOTE(review): duplicate camera names would throw here — assumed
            // unique within a scene; confirm against the scene prefab.
            cameras_.Add(scene_camera.name, scene_camera);
        }
        return true;
    }

    // The component management calls below simply delegate to the ComponentManager.
    public bool AddComponent(string type, string name, string path, Orrb.RendererComponentConfig config,
                             bool enabled) {
        return component_manager_.AddComponent(type, name, path, config, enabled);
    }

    public bool RemoveComponent(string name) {
        return component_manager_.RemoveComponent(name);
    }

    public bool UpdateComponent(string name, Orrb.RendererComponentConfig config) {
        return component_manager_.UpdateComponent(name, config);
    }

    public bool NextState() {
        return state_loader_.NextState();
    }

    public bool UpdateState(IList<float> state) {
        return state_loader_.UpdateState(state);
    }

    // Resolve a list of camera names to camera objects; returns null (and logs)
    // if any name is unknown.
    public List<Camera> GetCameras(IList<string> camera_names) {
        List<Camera> scene_cameras = new List<Camera>();
        foreach (string camera_name in camera_names) {
            Camera scene_camera;
            // Single dictionary lookup instead of ContainsKey + indexer.
            if (!cameras_.TryGetValue(camera_name, out scene_camera)) {
                // Log tag fixed: this is SceneInstance::GetCameras, not SceneManager::GetCamera.
                Logger.Error("SceneInstance::GetCameras::Cannot find camera: {0}.", camera_name);
                return null;
            }
            scene_cameras.Add(scene_camera);
        }
        return scene_cameras;
    }

    // All cached scene cameras.
    public List<Camera> GetCameras() {
        return new List<Camera>(cameras_.Values);
    }

    public GameObject GetRobot() {
        return robot_loader_.GetRobot();
    }

    public StateLoader GetStateLoader() {
        return state_loader_;
    }

    public ComponentManager GetComponentManager() {
        return component_manager_;
    }
}
108
orrb
openai
C#
using System.Collections; using System.Collections.Generic; using UnityEngine; // The SceneManager knows how to create new instances of the prefabbed scene, and // keeps track of those instances. OTOH right now we use just one scene per server. public class SceneManager : MonoBehaviour { [SerializeField] public SceneInstance scene_instance_prefab_ = null; [SerializeField] public float scene_distance_ = 100.0f; private Dictionary<int, SceneInstance> scene_instances_ = new Dictionary<int, SceneInstance>(); private int next_id_ = 0; public SceneInstance CreateSceneInstance() { SceneInstance scene_instance = Instantiate<SceneInstance>(scene_instance_prefab_); scene_instance.SetId(next_id_); scene_instances_.Add(next_id_, scene_instance); return scene_instance; } public SceneInstance GetSceneInstance(int id) { if (scene_instances_.ContainsKey(id)) { return scene_instances_[id]; } else { return null; } } }
34
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.IO;

// The StateLoader is responsible for updating the joint positions. The state loader can
// use a csv file with example states loaded from disk - this is used in interactive / debug
// mode. The state loader can also accept a list of qposes / joint positions - used by the
// RenderServer in server mode.
public class StateLoader : MonoBehaviour {

    // A mapping between the joint name and the qpos index in the state data.
    public class JointDefinition {
        public string name;
        public int index;

        public JointDefinition(string name, int index) {
            this.name = name;
            this.index = index;
        }
    }

    // mapping_[i] is the joint driven by qpos index i (null for unmapped indices).
    private JointController[] mapping_ = new JointController[0];
    // Example states loaded from disk (interactive mode only).
    private List<float[]> states_ = new List<float[]>();
    // Per-stream reference footage textures, one per loaded state.
    private Dictionary<string, Texture2D[]> footage_ = new Dictionary<string, Texture2D[]>();
    private int current_frame_ = 0;
    private Texture2D reference_image_texture_ = null;
    private float reference_overlay_alpha_ = 0.5f;
    private bool playing_ = false;
    private Material overlay_material_ = null;

    [SerializeField]
    public bool automatic_update_ = false;

    // Update is called once per frame.
    void Update() {
        if (automatic_update_) {
            NextState();
        }
    }

    public void Toggle() {
        automatic_update_ = !automatic_update_;
    }

    // Load the joint:index mapping from a csv file (joint_name,qpos_index per line).
    public bool Initialize(string mapping_path) {
        List<JointDefinition> joint_definitions = new List<JointDefinition>();
        // Dispose the reader deterministically (the original leaked the handle).
        using (StreamReader reader = new StreamReader(mapping_path)) {
            string line = "";
            while ((line = reader.ReadLine()) != null) {
                string[] split = line.Split(new char[] { ',' }, System.StringSplitOptions.RemoveEmptyEntries);
                joint_definitions.Add(new JointDefinition(
                    split[0], int.Parse(split[1], System.Globalization.CultureInfo.InvariantCulture)));
            }
        }
        return Initialize(joint_definitions);
    }

    // Build the index -> joint array from the definitions and prepare the
    // overlay material used for reference footage.
    public bool Initialize(IList<JointDefinition> joint_definitions) {
        int max = -1;
        foreach (JointDefinition joint_definition in joint_definitions) {
            max = Mathf.Max(max, joint_definition.index);
        }
        max++;
        mapping_ = new JointController[max];
        for (int i = 0; i < max; ++i) {
            mapping_[i] = null;
        }
        foreach (JointDefinition joint_definition in joint_definitions) {
            mapping_[joint_definition.index] = FindJoint(joint_definition.name);
        }
        overlay_material_ = new Material(Shader.Find("Unlit/Overlay"));
        return true;
    }

    // Open the csv file with the example states and load them. If there is footage for the states,
    // load it into textures.
    public bool InitializeStateStream(string stream_dir) {
        string state_path = string.Format("{0}/qpos.csv", stream_dir);
        // Dispose the reader deterministically (the original leaked the handle).
        using (StreamReader state_reader = new StreamReader(state_path)) {
            string line = "";
            while ((line = state_reader.ReadLine()) != null) {
                string[] split = line.Split(new char[] { ',' }, System.StringSplitOptions.RemoveEmptyEntries);
                float[] state = new float[split.Length];
                for (int i = 0; i < split.Length; ++i) {
                    // Invariant culture: qpos files use '.' decimals regardless of locale.
                    state[i] = float.Parse(split[i], System.Globalization.CultureInfo.InvariantCulture);
                }
                states_.Add(state);
            }
        }
        // Guard against an empty file (the original indexed states_[0] blindly).
        if (states_.Count == 0) {
            Logger.Error("StateLoader::InitializeStateStream::No states in: {0}.", state_path);
            return false;
        }
        current_frame_ = 0;
        UpdateState(states_[0]);
        string footage_path = string.Format("{0}/footage.csv", stream_dir);
        if (File.Exists(footage_path)) {
            string[] footage_streams = File.ReadAllText(footage_path).Split(
                new char[] { ',' }, System.StringSplitOptions.RemoveEmptyEntries);
            foreach (string footage_stream in footage_streams) {
                string footage_stream_trimmed = footage_stream.Trim();
                Texture2D[] stream_textures = new Texture2D[states_.Count];
                footage_.Add(footage_stream_trimmed, stream_textures);
                for (int i = 0; i < states_.Count; ++i) {
                    string file_name = string.Format("{0}/{1}_{2:D6}.png", stream_dir,
                                                     footage_stream_trimmed, i);
                    try {
                        byte[] file_data = File.ReadAllBytes(file_name);
                        stream_textures[i] = new Texture2D(2, 2);
                        stream_textures[i].LoadImage(file_data);
                    } catch (FileNotFoundException) {
                        // Missing frames are tolerated; substitute a black texture.
                        Logger.Warning("StateLoader::InitializeStateStream::Missing file: {0}.", file_name);
                        stream_textures[i] = Texture2D.blackTexture;
                    }
                }
            }
        }
        return true;
    }

    // Look up a JointController by GameObject name; logs and returns null when absent.
    private JointController FindJoint(string joint_name) {
        JointController[] joints = GetComponentsInChildren<JointController>();
        foreach (JointController joint in joints) {
            if (joint_name.Equals(joint.gameObject.name)) {
                return joint;
            }
        }
        Logger.Warning("StateLoader::FindJoint::Could not find joint: {0}", joint_name);
        return null;
    }

    private Quaternion GetQuaternion(IList<float> values, int index) {
        // Quaternions in MuJoCo are in the wxyz form and unity needs xyzw.
        return new Quaternion(values[index + 1], values[index + 2], values[index + 3], values[index]);
    }

    private Quaternion GetQuaternion(float[] values, int index) {
        // Quaternions in MuJoCo are in the wxyz form and unity needs xyzw.
        return new Quaternion(values[index + 1], values[index + 2], values[index + 3], values[index]);
    }

    // In interactive mode go to the next state loaded from the file.
    public bool NextState() {
        if (states_.Count == 0) {
            return false;
        }
        current_frame_ = (current_frame_ + 1) % states_.Count;
        return UpdateState(states_[current_frame_]);
    }

    // In interactive mode go to the previous state loaded from the file.
    public bool PreviousState() {
        if (states_.Count == 0) {
            return false;
        }
        current_frame_ = (current_frame_ + states_.Count - 1) % states_.Count;
        return UpdateState(states_[current_frame_]);
    }

    // Update all joints from a list of qposes.
    public bool UpdateState(IList<float> state) {
        for (int i = 0; i < Mathf.Min(state.Count, mapping_.Length); ++i) {
            JointController joint = mapping_[i];
            if (joint != null) {
                switch (joint.joint_type_) {
                    case JointController.JointType.Hinge:
                    case JointController.JointType.Slide:
                        joint.UpdateJoint(state[i]);
                        break;
                    case JointController.JointType.Ball:
                        // Ball joints consume 4 consecutive qpos values (wxyz).
                        joint.UpdateJoint(GetQuaternion(state, i));
                        break;
                }
            }
        }
        return true;
    }

    // Interactive-mode control panel: frame stepping, play toggle, frame slider
    // and the reference footage picker.
    public void DrawEditorGUI() {
        GUILayout.BeginVertical();
        GUILayout.BeginHorizontal();
        if (GUILayout.Button(" < ", GUILayout.Width(20))) {
            PreviousState();
        }
        if (GUILayout.Button(" > ", GUILayout.Width(20))) {
            NextState();
        }
        // Assignment pulled out of the condition for readability; while toggled
        // on, advance one state per GUI pass.
        playing_ = GUILayout.Toggle(playing_, " Play ", GUILayout.Width(50));
        if (playing_) {
            NextState();
        }
        GUILayout.Space(10);
        int new_frame = (int)GUILayout.HorizontalSlider(current_frame_, 0.0f,
            states_.Count == 0 ? 0 : states_.Count - 1, GUILayout.ExpandWidth(true));
        if (new_frame != current_frame_) {
            current_frame_ = new_frame;
            UpdateState(states_[current_frame_]);
        }
        GUIStyle style = new GUIStyle(GUI.skin.label);
        style.alignment = TextAnchor.UpperRight;
        GUILayout.Label(string.Format(" {0} / {1} ", current_frame_, states_.Count), style,
                        GUILayout.Width(80));
        GUILayout.EndHorizontal();
        if (footage_.Count > 1) {
            RendererComponent.GUIHorizontalLine(1);
            GUILayout.BeginHorizontal();
            int button_size = Mathf.Min(100, 380 / (footage_.Count + 1));
            if (GUILayout.Button("Clear", GUILayout.Width(button_size), GUILayout.Height(button_size))) {
                reference_image_texture_ = null;
            }
            GUILayout.FlexibleSpace();
            foreach (KeyValuePair<string, Texture2D[]> streams in footage_) {
                if (streams.Value != null && streams.Value.Length > current_frame_) {
                    if (GUILayout.Button(streams.Value[current_frame_], GUILayout.Width(button_size),
                                         GUILayout.Height(button_size))) {
                        reference_image_texture_ = streams.Value[current_frame_];
                    }
                    GUILayout.FlexibleSpace();
                }
            }
            GUILayout.FlexibleSpace();
            GUILayout.EndHorizontal();
            RendererComponent.GUISlider("overlay_alpha_", ref reference_overlay_alpha_, 0.0f, 1.0f);
            RendererComponent.GUIHorizontalLine(1);
        }
        GUILayout.EndVertical();
    }

    // Draw the transparent overlay with the image from the footage read from disk.
    public void DrawSceneGUI() {
        if (reference_image_texture_ != null) {
            overlay_material_.SetFloat("_Alpha", reference_overlay_alpha_);
            Graphics.DrawTexture(new Rect(0, 0, Screen.width, Screen.height), reference_image_texture_,
                                 overlay_material_);
        }
    }
}
254
orrb
openai
C#
using System;
using System.IO;
using UnityEngine;

// The CameraCalibrator is an interactive mode component used to match the parameters
// of the virtual cameras with their real world counterparts. When turned on the
// calibrator allows to locally translate (camera dolly), rotate (euler) and modify
// the field of view. If a real data set is loaded the camera overlay can be used to
// visually align the two sources (sim and real). Finally the local transformations can
// be collapsed and new position, rotation and fov values for the camera retrieved.
//
// Configurable properties:
//   string camera_name - which camera does this calibrator modify,
//   vector3 local_position_delta - local translation to be applied,
//   quaternion local_rotation_delta - local rotation to be applied,
//   float fov_delta - field of view change to be applied.
//
// Read-only properties:
//   string mujoco_position - mujoco xml position vector as string,
//   string mujoco_rotation - mujoco xml rotation quaternion as string,
//   string mujoco_fov - mujoco xml fov as string,
//   string dactyl_camera_setup - full camera config in json.
public class CameraCalibrator : RendererComponent {

    // The camera being calibrated, resolved by name in UpdateCamera().
    private Camera camera_ = null;

    // Untouched camera pose/fov captured when the camera is (re)resolved; all
    // deltas below are applied on top of these each RunComponent pass.
    private Vector3 original_position_ = Vector3.zero;
    private Quaternion original_rotation_ = Quaternion.identity;
    private float original_fov_ = 0.0f;

    // Interactive slider state: camera-local dolly offsets, euler tweaks and
    // fov zoom, all relative until collapsed into the deltas below.
    private float dolly_x_ = 0.0f;
    private float dolly_y_ = 0.0f;
    private float dolly_z_ = 0.0f;
    private float euler_x_ = 0.0f;
    private float euler_y_ = 0.0f;
    private float euler_z_ = 0.0f;
    private float zoom_ = 0.0f;

    [SerializeField]
    [ConfigProperty]
    public string camera_name_ = "";

    [SerializeField]
    [ConfigProperty]
    public Vector3 local_position_delta_ = Vector3.zero;

    [SerializeField]
    [ConfigProperty]
    public Quaternion local_rotation_delta_ = Quaternion.identity;

    [SerializeField]
    [ConfigProperty]
    public float fov_delta_ = 0.0f;

    // Resolve the named camera among the children and cache the original
    // position, rotation and fov.
    private void UpdateCamera() {
        Camera[] child_cameras = GetComponentsInChildren<Camera>();
        foreach (Camera child_camera in child_cameras) {
            if (child_camera.name.Equals(camera_name_)) {
                camera_ = child_camera;
                original_fov_ = camera_.fieldOfView;
                original_rotation_ = camera_.transform.rotation;
                original_position_ = camera_.transform.position;
            }
        }
    }

    // Read the camera name first so the initial camera lookup uses the
    // configured value, then apply the remaining config properties.
    public override bool InitializeComponent(Orrb.RendererComponentConfig config) {
        camera_name_ = ConfigUtils.GetProperty("camera_name", config, camera_name_);
        UpdateCamera();
        return UpdateComponent(config);
    }

    // Re-resolve the camera only when the configured name actually changed.
    public override bool UpdateComponent(Orrb.RendererComponentConfig config) {
        string old_camera_name = camera_name_;
        ConfigUtils.GetProperties(this, config);
        if (!camera_name_.Equals(old_camera_name)) {
            UpdateCamera();
        }
        return true;
    }

    public override bool RunComponent(RendererComponent.IOutputContext context) {
        if (camera_ != null) {
            // Apply the local transformations on top of the cached original position,
            // rotation and fov.
            Vector3 dolly = dolly_x_ * camera_.transform.right + dolly_y_ * camera_.transform.up +
                            dolly_z_ * camera_.transform.forward;
            camera_.transform.position = original_position_ + local_position_delta_ + dolly;
            camera_.transform.rotation = original_rotation_ * local_rotation_delta_ * Quaternion.Euler(
                euler_x_, euler_y_, euler_z_);
            camera_.fieldOfView = original_fov_ + fov_delta_ + zoom_;
        }
        return true;
    }

    // Export the read-only calibration results alongside the regular properties.
    public override Orrb.RendererComponentConfig GetConfig() {
        Orrb.RendererComponentConfig config = base.GetConfig();
        ConfigUtils.SetProperty("mujoco_position", config, GetCameraPositionString());
        ConfigUtils.SetProperty("mujoco_rotation", config, GetCameraRotationString());
        ConfigUtils.SetProperty("mujoco_fov", config, GetCameraFovString());
        ConfigUtils.SetProperty("dactyl_camera_setup", config, GetDactylCameraSetupString());
        return config;
    }

    // "x y z" of the camera's local position (empty when no camera is resolved).
    private string GetCameraPositionString() {
        if (camera_ == null) {
            return "";
        }
        return string.Format("{0} {1} {2}", camera_.transform.localPosition.x,
                             camera_.transform.localPosition.y, camera_.transform.localPosition.z);
    }

    // Full camera setup as a JSON-like string. The camera is temporarily turned
    // to look along its local back vector to obtain the rotation in the target
    // convention, then restored — presumably to convert from Unity's forward
    // convention to MuJoCo's; TODO confirm.
    private string GetDactylCameraSetupString() {
        if (camera_ == null) {
            return "";
        }
        Vector3 position = camera_.transform.localPosition;
        Quaternion old_rotation = camera_.transform.localRotation;
        camera_.transform.LookAt(camera_.transform.position + camera_.transform.TransformDirection(Vector3.back),
                                 camera_.transform.TransformDirection(Vector3.up));
        Quaternion rotation = camera_.transform.localRotation;
        camera_.transform.localRotation = old_rotation;
        return string.Format("{{'name': '{0}', 'pos': [{1}, {2}, {3}], 'quat': [{4}, {5}, {6}, {7}], 'fovy': {8}}}",
                             camera_.name, position.x, position.y, position.z, rotation.w, rotation.x,
                             rotation.y, rotation.z, camera_.fieldOfView);
    }

    // "w x y z" rotation in the MuJoCo convention, using the same temporary
    // LookAt flip-and-restore trick as GetDactylCameraSetupString.
    private string GetCameraRotationString() {
        if (camera_ == null) {
            return "";
        }
        Quaternion old_rotation = camera_.transform.localRotation;
        camera_.transform.LookAt(camera_.transform.position + camera_.transform.TransformDirection(Vector3.back),
                                 camera_.transform.TransformDirection(Vector3.up));
        Quaternion local_mujoco_rotation = camera_.transform.localRotation;
        camera_.transform.localRotation = old_rotation;
        return string.Format("{0} {1} {2} {3}", local_mujoco_rotation.w, local_mujoco_rotation.x,
                             local_mujoco_rotation.y, local_mujoco_rotation.z);
    }

    private string GetCameraFovString() {
        if (camera_ == null) {
            return "";
        }
        return string.Format("{0}", camera_.fieldOfView);
    }

    // Fold the interactive slider state into the persistent deltas and zero the
    // sliders, so the net camera pose is unchanged.
    private void Colapse() {
        Vector3 dolly = dolly_x_ * camera_.transform.right + dolly_y_ * camera_.transform.up +
                        dolly_z_ * camera_.transform.forward;
        local_position_delta_ += dolly;
        local_rotation_delta_ *= Quaternion.Euler(euler_x_, euler_y_, euler_z_);
        fov_delta_ += zoom_;
        dolly_x_ = dolly_y_ = dolly_z_ = euler_x_ = euler_y_ = euler_z_ = zoom_ = 0.0f;
    }

    // Discard both the persistent deltas and the slider state.
    private void Reset() {
        local_position_delta_ = Vector3.zero;
        local_rotation_delta_ = Quaternion.identity;
        fov_delta_ = 0.0f;
        dolly_x_ = dolly_y_ = dolly_z_ = euler_x_ = euler_y_ = euler_z_ = zoom_ = 0.0f;
    }

    public override void DrawEditorGUI() {
        GUILayout.BeginVertical();
        RendererComponent.GUIField("camera_name", ref camera_name_);
        // Read-only result fields, re-rendered each pass.
        string label_text = GetCameraPositionString();
        RendererComponent.GUIField("mujoco_position", ref label_text);
        label_text = GetCameraRotationString();
        RendererComponent.GUIField("mujoco_rotation", ref label_text);
        label_text = GetCameraFovString();
        RendererComponent.GUIField("mujoco_fov", ref label_text);
        RendererComponent.GUIHorizontalLine(1);
        RendererComponent.GUIVector3("local_position_delta", ref local_position_delta_);
        RendererComponent.GUIQuaternion("local_rotation_delta", ref local_rotation_delta_);
        RendererComponent.GUIHorizontalLine(1);
        RendererComponent.GUISlider("dolly_x", ref dolly_x_, -0.03f, 0.03f);
        RendererComponent.GUISlider("dolly_y", ref dolly_y_, -0.03f, 0.03f);
        RendererComponent.GUISlider("dolly_z", ref dolly_z_, -0.03f, 0.03f);
        RendererComponent.GUIHorizontalLine(1);
        RendererComponent.GUISlider("euler_x", ref euler_x_, -2.0f, 2.0f);
        RendererComponent.GUISlider("euler_y", ref euler_y_, -2.0f, 2.0f);
        RendererComponent.GUISlider("euler_z", ref euler_z_, -2.0f, 2.0f);
        RendererComponent.GUIHorizontalLine(1);
        RendererComponent.GUISlider("zoom", ref zoom_, -1f, 1f);
        RendererComponent.GUIHorizontalLine(1);
        GUILayout.BeginHorizontal();
        if (RendererComponent.GUIButton("Colapse")) {
            Colapse();
        }
        if (RendererComponent.GUIButton("Reset")) {
            Reset();
        }
        GUILayout.EndHorizontal();
        GUILayout.EndVertical();
    }
}
216
orrb
openai
C#
using System; using System.Collections; using System.Collections.Generic; using UnityEngine; // This component allows manual modification of clipping planes and fov of a scene // camera. Attach it to a specific camera (by specyfing a hierarchy path in the config) // as it does not search recursively in children. // // Configurable properties: // float near_clip - near cliping plane distance in meters, // float far_clip - far cliping plane distance in meters, // float field_of_view - field of view to set. public class CameraProperties : RendererComponent { [SerializeField] [ConfigProperty] public float near_clip_ = 0.1f; [SerializeField] [ConfigProperty] public float far_clip_ = 20.0f; [SerializeField] [ConfigProperty] public float field_of_view_ = 20.0f; private Camera camera_ = null; public override void DrawEditorGUI() { GUILayout.BeginVertical(); RendererComponent.GUISlider("near_clip", ref near_clip_, 0.001f, far_clip_); RendererComponent.GUISlider("far_clip", ref far_clip_, near_clip_, 100.0f); RendererComponent.GUISlider("field_of_view", ref field_of_view_, 1.0f, 180.0f); GUILayout.EndVertical(); } public override bool InitializeComponent(Orrb.RendererComponentConfig config) { // This component should be attached to an object with an actual camera. camera_ = GetComponent<Camera>(); if (camera_ == null) { return false; } return UpdateComponent(config); } public override bool RunComponent(RendererComponent.IOutputContext context) { if (camera_ == null) { return false; } camera_.nearClipPlane = near_clip_; camera_.farClipPlane = far_clip_; camera_.fieldOfView = field_of_view_; return true; } }
58
orrb
openai
C#
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// The Hide component allows to prevent some objects from being rendered. Three
// types of entities are supported: bodies (with subobjects), geoms and sites.
// For each entity type a list of prefixes can be provided, if an entity name
// matches a prefix the entity will be hidden.
//
// Configurable properties:
//   string body_prefix - a comma separated list of prefixes, bodies to be hidden,
//   string geom_prefix - a comma separated list of prefixes, geoms to be hidden,
//   string site_prefix - a comma separated list of prefixes, sites to be hidden.
public class Hide : RendererComponent {

    [SerializeField]
    [ConfigProperty]
    public string body_prefix_ = "";

    [SerializeField]
    [ConfigProperty]
    public string geom_prefix_ = "";

    [SerializeField]
    [ConfigProperty]
    public string site_prefix_ = "";

    // Entities hidden by this component, remembered so they can be re-activated
    // when the configuration changes.
    private List<BodyController> bodies_ = new List<BodyController>();
    private List<GeomController> geoms_ = new List<GeomController>();
    private List<SiteController> sites_ = new List<SiteController>();

    // Unhide the hidden objects, clear the lists.
    private void Clear() {
        foreach (BodyController body in bodies_) {
            body.gameObject.SetActive(true);
        }
        bodies_.Clear();
        foreach (GeomController geom in geoms_) {
            geom.gameObject.SetActive(true);
        }
        geoms_.Clear();
        foreach (SiteController site in sites_) {
            site.gameObject.SetActive(true);
        }
        sites_.Clear();
    }

    // Re-apply the prefix configuration: first restore everything, then hide
    // whatever matches the (possibly updated) prefix lists.
    public override bool UpdateComponent(Orrb.RendererComponentConfig config) {
        base.UpdateComponent(config);
        Clear();
        HideBodies();
        HideGeoms();
        HideSites();
        return true;
    }

    // Shared implementation for the three entity kinds (the original three
    // copies differed only in type and fields): deactivate every child of type
    // T whose name starts with one of the comma separated prefixes, recording
    // it in 'hidden' for later restoration.
    private void HideMatching<T>(string prefix_list, List<T> hidden) where T : Component {
        string[] prefixes = prefix_list.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
        foreach (T candidate in transform.GetComponentsInChildren<T>()) {
            foreach (string prefix in prefixes) {
                if (candidate.name.StartsWith(prefix, StringComparison.Ordinal)) {
                    hidden.Add(candidate);
                    candidate.gameObject.SetActive(false);
                    break;
                }
            }
        }
    }

    // Hide bodies and keep a list of hidden ones.
    private void HideBodies() {
        HideMatching(body_prefix_, bodies_);
    }

    // Hide geoms and keep a list of hidden ones.
    private void HideGeoms() {
        HideMatching(geom_prefix_, geoms_);
    }

    // Hide sites and keep a list of hidden ones.
    private void HideSites() {
        HideMatching(site_prefix_, sites_);
    }

    // This component does nothing each frame. The actual hiding happens on initialization / update.
    public override bool RunComponent(RendererComponent.IOutputContext context) {
        return true;
    }

    public override void DrawEditorGUI() {
        GUILayout.BeginVertical();
        RendererComponent.GUIField("body_prefix", ref body_prefix_);
        RendererComponent.GUIField("geom_prefix", ref geom_prefix_);
        RendererComponent.GUIField("site_prefix", ref site_prefix_);
        GUILayout.EndVertical();
    }
}
117
orrb
openai
C#
using System; using System.Collections; using System.Collections.Generic; using UnityEngine; // LookAt component aligns a given object so that forward arrow points toward the // specified point. Attach this to a single entity. // // Configurable properties: // vector3 target_offset - absolute position to look at. public class LookAt : RendererComponent { [SerializeField] [ConfigProperty] public Vector3 target_offset_ = Vector3.zero; public override void DrawEditorGUI() { GUILayout.BeginVertical(); RendererComponent.GUIVector3("target_offset", ref target_offset_); GUILayout.EndVertical(); } public override bool RunComponent(RendererComponent.IOutputContext context) { transform.LookAt(target_offset_, Vector3.up); return true; } }
29
orrb
openai
C#
using System;
using System.Linq;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// The Tracker component produces auxiliary outputs that contain screen space positions of
// scene objects. For each tracked object 'object' and each camera 'camera' this component
// will emit a float output: 'tracker_object_X_camera' that contains 2 float values, i.e.:
// the normalized x and y screen coordinates of the tracked object.
//
// Configurable properties:
//   string camera_names - comma separated list of camera names to track objects with,
//   string tracked_object_names - comma separated list of objects to track,
//   string tracked_object_aliases - comma separated list of nice, human readable names
//                                   for the tracked objects.
public class Tracker : RendererComponent {

    [SerializeField]
    [ConfigProperty]
    public string camera_names_ = "";

    [SerializeField]
    [ConfigProperty]
    public string tracked_object_names_ = "";

    [SerializeField]
    [ConfigProperty]
    public string tracked_object_aliases_ = "";

    // Caches resolved from the comma separated config strings.
    private List<GameObject> tracked_objects_ = new List<GameObject>();
    private string[] tracked_object_aliases_array_ = null;
    private List<Camera> cameras_ = new List<Camera>();

    // Interactive-mode overlay assets.
    private Texture marker_ = null;
    private Material overlay_material_ = null;
    private Texture2D bounding_box_texture_ = null;
    private GUIStyle bounding_box_style_ = null;

    // Cache the cameras for tracking and the objects to be tracked.
    private void UpdateObjectsAndCameras() {
        // Rebuild the caches from scratch: without clearing first, every
        // reconfiguration appended duplicate / stale entries to the lists.
        cameras_.Clear();
        tracked_objects_.Clear();
        string[] camera_names = camera_names_.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
        string[] tracked_object_names = tracked_object_names_.Split(new char[] { ',' },
                                                                    StringSplitOptions.RemoveEmptyEntries);
        Camera[] cameras = GetComponentsInChildren<Camera>();
        foreach (Camera tracking_camera in cameras) {
            foreach (string camera_name in camera_names) {
                if (camera_name.Equals(tracking_camera.name)) {
                    cameras_.Add(tracking_camera);
                    break;
                }
            }
        }
        Transform[] children = GetComponentsInChildren<Transform>();
        foreach (Transform tracked_object in children) {
            foreach (string tracked_object_name in tracked_object_names) {
                if (tracked_object_name.Equals(tracked_object.name)) {
                    tracked_objects_.Add(tracked_object.gameObject);
                    break;
                }
            }
        }
    }

    public override bool InitializeComponent(Orrb.RendererComponentConfig config) {
        camera_names_ = ConfigUtils.GetProperty("camera_names", config, camera_names_);
        tracked_object_names_ = ConfigUtils.GetProperty("tracked_object_names", config, tracked_object_names_);
        tracked_object_aliases_array_ = tracked_object_aliases_.Split(new char[] { ',' },
                                                                      StringSplitOptions.RemoveEmptyEntries);
        // Load the crosshair texture and the overlay material, to be used in interactive mode.
        marker_ = Resources.Load<Texture>("Marker");
        overlay_material_ = new Material(Shader.Find("Unlit/Overlay"));
        // Prepare the bounding box style from a 9-sliced background texture.
        bounding_box_texture_ = Resources.Load<Texture2D>("BoundingBox");
        bounding_box_style_ = new GUIStyle();
        bounding_box_style_.normal.background = bounding_box_texture_;
        bounding_box_style_.border = new RectOffset(1, 1, 1, 1);
        UpdateObjectsAndCameras();
        return UpdateComponent(config);
    }

    // Refresh the caches only for the config strings that actually changed.
    public override bool UpdateComponent(Orrb.RendererComponentConfig config) {
        string old_camera_names = camera_names_;
        string old_tracked_object_names = tracked_object_names_;
        string old_tracked_object_aliases_ = tracked_object_aliases_;
        ConfigUtils.GetProperties(this, config);
        if (!camera_names_.Equals(old_camera_names) || !tracked_object_names_.Equals(old_tracked_object_names)) {
            UpdateObjectsAndCameras();
        }
        if (!tracked_object_aliases_.Equals(old_tracked_object_aliases_)) {
            tracked_object_aliases_array_ = tracked_object_aliases_.Split(new char[] { ',' },
                                                                          StringSplitOptions.RemoveEmptyEntries);
        }
        return true;
    }

    // The eight corner points of an axis-aligned bounding box.
    private List<Vector3> GetAllCornersOfBounds(Bounds b) {
        // Take all the corner points in 3D.
        List<float> x_offsets = new List<float>() { -b.size.x / 2, b.size.x / 2 };
        List<float> y_offsets = new List<float>() { -b.size.y / 2, b.size.y / 2 };
        List<float> z_offsets = new List<float>() { -b.size.z / 2, b.size.z / 2 };
        List<Vector3> corners = new List<Vector3>();
        foreach (float x_offset in x_offsets) {
            foreach (float y_offset in y_offsets) {
                foreach (float z_offset in z_offsets) {
                    corners.Add(new Vector3(b.center.x + x_offset, b.center.y + y_offset,
                                            b.center.z + z_offset));
                }
            }
        }
        return corners;
    }

    private float[] GetBounds2DViewPoint(Camera tracking_camera, GameObject tracked_object,
                                         bool use_screen_point = false) {
        // Map the bounds of the `tracked_object` onto a 2D view of the `tracking_camera` by mapping
        // all the corners and selecting the minimum / maximum on both x and y axises.
        // If use_screen_point = true, we map the corner points to screen; otherwise map to viewport.
        // NOTE(review): assumes the object has at least one Renderer in its
        // children; Min()/Max() throw on an empty sequence — confirm upstream.
        List<float> xs = new List<float>();
        List<float> ys = new List<float>();
        // Go through the bounds of each render associated with the object.
        foreach (Renderer render in tracked_object.GetComponentsInChildren<Renderer>()) {
            // Map all the corner points into the 2D view point or screen point.
            List<Vector3> corners = GetAllCornersOfBounds(render.bounds);
            foreach (Vector3 corner in corners) {
                Vector3 transformed_point;
                if (use_screen_point) {
                    transformed_point = tracking_camera.WorldToScreenPoint(corner);
                } else {
                    transformed_point = tracking_camera.WorldToViewportPoint(corner);
                }
                xs.Add(transformed_point.x);
                ys.Add(transformed_point.y);
            }
        }
        // Return the minimum and maximum values of x-axis and y-axis.
        return new float[] { xs.Min(), xs.Max(), ys.Min(), ys.Max() };
    }

    // Interactive-mode readout: per camera, list each tracked object's viewport
    // coordinates and 2D bounds.
    public override void DrawEditorGUI() {
        GUILayout.BeginVertical();
        foreach (Camera tracking_camera in cameras_) {
            GUILayout.Label(tracking_camera.name);
            for (int i = 0; i < tracked_objects_.Count; ++i) {
                GameObject tracked_object = tracked_objects_[i];
                string tracked_object_name = tracked_object.name;
                if (i < tracked_object_aliases_array_.Length) {
                    tracked_object_name = tracked_object_aliases_array_[i];
                }
                Vector3 viewport_position =
                    tracking_camera.WorldToViewportPoint(tracked_object.transform.position);
                float[] bounds_2D = GetBounds2DViewPoint(tracking_camera, tracked_object);
                GUILayout.BeginHorizontal();
                GUILayout.Label(tracked_object.name, GUILayout.Width(200));
                GUILayout.Label(string.Format("x:{0:0.000###}", viewport_position.x), GUILayout.Width(100));
                GUILayout.Label(string.Format("y:{0:0.000###}", viewport_position.y), GUILayout.Width(100));
                GUILayout.EndHorizontal();
                GUILayout.Label("Bounds: " + string.Join(", ", bounds_2D));
                RendererComponent.GUIHorizontalLine(1);
            }
        }
        GUILayout.EndVertical();
    }

    // Draw the tracking crosshair and bounding box on the overlay, in interactive mode.
    public override void DrawSceneGUI() {
        foreach (Camera tracking_camera in cameras_) {
            if (tracking_camera.Equals(Camera.current)) {
                foreach (GameObject tracked_object in tracked_objects_) {
                    Vector3 screen_position =
                        tracking_camera.WorldToScreenPoint(tracked_object.transform.position);
                    Graphics.DrawTexture(new Rect(screen_position.x - 5, Screen.height - screen_position.y - 5,
                                                  10, 10), marker_, overlay_material_);
                    float[] bounds = GetBounds2DViewPoint(tracking_camera, tracked_object, true);
                    // Screen points have y up; GUI rects have y down — flip.
                    float y_min = Screen.height - bounds[3];
                    float y_max = Screen.height - bounds[2];
                    float height = y_max - y_min;
                    float width = bounds[1] - bounds[0];
                    GUI.Box(new Rect(bounds[0], y_min, width, height), "", bounding_box_style_);
                }
            }
        }
    }

    // Emit, per camera and tracked object, the viewport center position and the
    // 2D bounding box as float output streams.
    public override bool RunComponent(IOutputContext context) {
        foreach (Camera tracking_camera in cameras_) {
            for (int i = 0; i < tracked_objects_.Count; ++i) {
                GameObject tracked_object = tracked_objects_[i];
                string tracked_object_name = tracked_object.name;
                if (i < tracked_object_aliases_array_.Length) {
                    // If there is an alias on the alias list, use it.
                    tracked_object_name = tracked_object_aliases_array_[i];
                }
                // Return the center of the object.
                string stream_name = string.Format("tracker_{0}_X_{1}", tracked_object_name,
                                                   tracking_camera.name);
                Vector3 viewport_position =
                    tracking_camera.WorldToViewportPoint(tracked_object.transform.position);
                context.OutputFloats(stream_name, new float[] { viewport_position.x, viewport_position.y });
                // Return the bounding box
                string stream_name_bbox = string.Format("tracker_{0}_X_{1}_bbox", tracked_object_name,
                                                        tracking_camera.name);
                context.OutputFloats(stream_name_bbox, GetBounds2DViewPoint(tracking_camera, tracked_object));
            }
        }
        return true;
    }
}
221
orrb
openai
C#
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// TRS component performs manual translation, rotation and scaling of the subject.
// Attach it to a specific object.
//
// Configurable properties:
//   vector3 translate - local translation,
//   quaternion rotate - local rotation,
//   vector3 scale - local scale.
public class TranslateRotateScale : RendererComponent {

    [SerializeField]
    [ConfigProperty]
    public Vector3 translate_ = Vector3.zero;

    [SerializeField]
    [ConfigProperty]
    public Quaternion rotate_ = Quaternion.identity;

    [SerializeField]
    [ConfigProperty]
    public Vector3 scale_ = Vector3.one;

    // Local transform captured at initialization; the configured TRS is always
    // applied relative to these values, never cumulatively frame-over-frame.
    private Vector3 original_translate_ = Vector3.zero;
    private Quaternion original_rotate_ = Quaternion.identity;
    private Vector3 original_scale_ = Vector3.one;

    // Expose the three configurable properties in the interactive editor.
    public override void DrawEditorGUI() {
        GUILayout.BeginVertical();
        RendererComponent.GUIVector3("translate", ref translate_);
        RendererComponent.GUIQuaternion("rotate", ref rotate_);
        RendererComponent.GUIVector3("scale", ref scale_);
        GUILayout.EndVertical();
    }

    public override bool InitializeComponent(Orrb.RendererComponentConfig config) {
        // Snapshot the untouched local transform before any TRS is applied.
        original_translate_ = transform.localPosition;
        original_rotate_ = transform.localRotation;
        original_scale_ = transform.localScale;
        return UpdateComponent(config);
    }

    public override bool RunComponent(RendererComponent.IOutputContext context) {
        // Re-derive the local transform from the cached originals each run:
        // offset the position, compose the rotation, and scale component-wise.
        transform.localPosition = original_translate_ + translate_;
        transform.localRotation = original_rotate_ * rotate_;
        Vector3 combined_scale = Vector3.Scale(original_scale_, scale_);
        transform.localScale = combined_scale;
        return true;
    }
}
55
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// A class representing MuJoCo bodies / worldbody elements.
// Bodies add no behavior of their own here: they exist purely as kinematic
// frames (via KineticHierarchyController) that child geoms, sites, joints
// and nested bodies attach to.
public class BodyController : KineticHierarchyController { }
7
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using System.Xml;
using UnityEngine;

// A class representing MuJoCo geom elements.
public class GeomController : GeometricPrimitiveController {

    // Segmentation category; encoded into the _CategoryColor material property
    // by SetCategoryRendererProperties().
    public int category_id_ = 0;

    // Static counters used to generate names for anonymous primitive objects.
    private static int box_count_ = 0;
    private static int cylinder_count_ = 0;
    private static int plane_count_ = 0;
    private static int sphere_count_ = 0;
    private static int capsule_count_ = 0;

    // Return `name` unchanged, or generate "<prefix>_<counter>" for anonymous geoms.
    private static string ResolveName(string prefix, ref int counter, string name) {
        if (name == null) {
            return string.Format("{0}_{1}", prefix, counter++);
        }
        return name;
    }

    // Helper factory methods used to create geometric primitives. Pass a
    // hierarchy parent, a name (or use a generated one), and basic parametric
    // geometry values.
    public static GeomController CreateBox(KineticHierarchyController parent, string name, Vector3 position,
                                           Quaternion rotation, Vector3 box_size) {
        name = ResolveName("box", ref box_count_, name);
        GeomController box_geom = SceneUtils.InstantiateWithController<GeomController>(name);
        box_geom.InitializeBox(parent, name, position, rotation, box_size);
        return box_geom;
    }

    public static GeomController CreateSphere(KineticHierarchyController parent, string name, Vector3 position,
                                              Quaternion rotation, Vector3 sphere_size) {
        name = ResolveName("sphere", ref sphere_count_, name);
        GeomController sphere_geom = SceneUtils.InstantiateWithController<GeomController>(name);
        sphere_geom.InitializeSphere(parent, name, position, rotation, sphere_size);
        return sphere_geom;
    }

    public static GeomController CreatePlane(KineticHierarchyController parent, string name, Vector3 position,
                                             Quaternion rotation, Vector3 plane_size) {
        name = ResolveName("plane", ref plane_count_, name);
        GeomController plane_geom = SceneUtils.InstantiateWithController<GeomController>(name);
        plane_geom.InitializePlane(parent, name, position, rotation, plane_size);
        return plane_geom;
    }

    public static GeomController CreateCylinder(KineticHierarchyController parent, string name, Vector3 position,
                                                Quaternion rotation, Vector3 cylinder_size) {
        // Consistency fix: use the shared ResolveName helper like the other
        // factories instead of duplicating its logic inline; also renamed the
        // misleading local (was `plane_geom`).
        name = ResolveName("cylinder", ref cylinder_count_, name);
        GeomController cylinder_geom = SceneUtils.InstantiateWithController<GeomController>(name);
        cylinder_geom.InitializeCylinder(parent, name, position, rotation, cylinder_size);
        return cylinder_geom;
    }

    public static GeomController CreateCapsule(KineticHierarchyController parent, string name, Vector3 position,
                                               Quaternion rotation, float half_length, float radius) {
        name = ResolveName("capsule", ref capsule_count_, name);
        GeomController capsule_geom = SceneUtils.InstantiateWithController<GeomController>(name);
        capsule_geom.InitializeCapsule(parent, name, position, rotation, half_length, radius);
        return capsule_geom;
    }

    public void SetCategoryRendererProperties() {
        // Create render PropertyBlocks with the appropriate category color
        // for each geom such that it can be rendered as a segmentation map.
        // The color depends only on category_id_, so build the block once and
        // share it across all child renderers (the old code also computed
        // unused `id`/`layer` locals per renderer).
        var mpb = new MaterialPropertyBlock();
        mpb.SetColor("_CategoryColor", EncodeCategoryAsColor(category_id_));
        foreach (var r in GetComponentsInChildren<Renderer>()) {
            r.SetPropertyBlock(mpb);
        }
    }

    // Encode a category id as a grayscale color: all RGB channels carry the
    // (truncated-to-byte) id, alpha is opaque.
    private static Color EncodeCategoryAsColor(int category_id) {
        // set all RGB channels to same value
        var color = new Color32(0, 0, 0, 255);
        color.r = (byte)(category_id);
        color.g = (byte)(category_id);
        color.b = (byte)(category_id);
        return color;
    }

    // Extract the category prefix from a geom's XML name attribute: a name of
    // the form "<category>:<rest>" yields "<category>"; otherwise null.
    public static string GetGeomCategoryFromXml(XmlNode geom_xml) {
        string geom_name = XmlUtils.GetString(geom_xml, "name", null);
        if (geom_name == null) {
            return null;
        }
        string[] split_name = geom_name.Split(new char[] { ':' }, 2);
        if (split_name.Length > 1) {
            return split_name[0];
        }
        return null;
    }
}
107
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// A base class for sites and geoms that can contain primitive shapes
// like: boxes, spheres, etc.
public class GeometricPrimitiveController : KineticHierarchyController {

    // Shared helper: instantiate a Unity primitive as a child of this
    // controller, strip the auto-attached collider (these objects are
    // render-only) and zero its local pose.  Callers then set the scale and,
    // where needed, a rotation.
    //
    // BUGFIX: the old per-shape code destroyed a hard-coded collider type and
    // got it wrong for cylinders — GameObject.CreatePrimitive attaches a
    // CapsuleCollider to PrimitiveType.Cylinder, not a MeshCollider, so the
    // cylinder's collider was silently leaked.  Destroying the base Collider
    // component removes whichever concrete collider the primitive carries.
    private GameObject CreateChildPrimitive(PrimitiveType type, string label) {
        GameObject obj = GameObject.CreatePrimitive(type);
        Destroy(obj.GetComponent<Collider>());
        obj.name = string.Format("{0}({1})", name, label);
        obj.transform.parent = transform;
        obj.transform.localPosition = Vector3.zero;
        obj.transform.localRotation = Quaternion.identity;
        return obj;
    }

    protected void InitializeBox(KineticHierarchyController parent, string box_name, Vector3 position,
                                 Quaternion rotation, Vector3 box_size) {
        Initialize(parent, box_name, position, rotation);
        GameObject the_box = CreateChildPrimitive(PrimitiveType.Cube, "primitive");
        the_box.transform.localScale = box_size;
    }

    protected void InitializeSphere(KineticHierarchyController parent, string sphere_name, Vector3 position,
                                    Quaternion rotation, Vector3 sphere_size) {
        Initialize(parent, sphere_name, position, rotation);
        GameObject the_sphere = CreateChildPrimitive(PrimitiveType.Sphere, "primitive");
        the_sphere.transform.localScale = sphere_size;
    }

    protected void InitializePlane(KineticHierarchyController parent, string plane_name, Vector3 position,
                                   Quaternion rotation, Vector3 plane_size) {
        Initialize(parent, plane_name, position, rotation);
        GameObject the_plane = CreateChildPrimitive(PrimitiveType.Plane, "primitive");
        the_plane.transform.localScale = plane_size;
    }

    protected void InitializeCylinder(KineticHierarchyController parent, string cylinder_name, Vector3 position,
                                      Quaternion rotation, Vector3 cylinder_size) {
        Initialize(parent, cylinder_name, position, rotation);
        GameObject the_cylinder = CreateChildPrimitive(PrimitiveType.Cylinder, "primitive");
        // Rotate 90 degrees about X so the primitive's length axis (Unity Y)
        // lines up with the controller's Z axis, matching the capsule below.
        the_cylinder.transform.localRotation = Quaternion.Euler(90.0f, 0.0f, 0.0f);
        the_cylinder.transform.localScale = cylinder_size;
    }

    // A capsule is created from a cylinder and two spheres.
    protected void InitializeCapsule(KineticHierarchyController parent, string capsule_name, Vector3 position,
                                     Quaternion rotation, float half_length, float radius) {
        Initialize(parent, capsule_name, position, rotation);

        // End caps sit at +/- half_length along the local Z axis.
        GameObject top_sphere = CreateChildPrimitive(PrimitiveType.Sphere, "top_sphere");
        top_sphere.transform.localPosition = Vector3.forward * half_length;
        top_sphere.transform.localScale = Vector3.one * radius;

        GameObject bottom_sphere = CreateChildPrimitive(PrimitiveType.Sphere, "bottom_sphere");
        bottom_sphere.transform.localPosition = Vector3.back * half_length;
        bottom_sphere.transform.localScale = Vector3.one * radius;

        // Shaft: scaled to span the caps and rotated so its length follows Z.
        GameObject cylinder = CreateChildPrimitive(PrimitiveType.Cylinder, "cylinder");
        cylinder.transform.localScale = new Vector3(radius, half_length, radius);
        cylinder.transform.localRotation = Quaternion.Euler(90.0f, 0.0f, 0.0f);
    }
}
89
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// A class representing MuJoCo joint elements.
public class JointController : KineticHierarchyController {

    public enum JointType {
        Hinge,
        Slide,
        Ball,
        Free
    };

    // Static counter used when generating names for anonymous joints.
    private static int joint_count_ = 0;

    // Rotation axis for hinges, slide direction for slides.
    [SerializeField]
    public Vector3 axis_ = Vector3.up;

    // Current joint position for hinges and slides.
    [SerializeField]
    public float value_ = 0.0f;

    // Current joint position for free joints.
    [SerializeField]
    public Vector3 value_vector_ = Vector3.zero;

    // Current joint rotation for ball/free joints.
    [SerializeField]
    public Quaternion value_quaternion_ = Quaternion.identity;

    // Range limits for hinges and slides.
    [SerializeField]
    public Vector2 range_ = new Vector2(float.MinValue, float.MaxValue);

    [SerializeField]
    public JointType joint_type_ = JointType.Hinge;

    // This update is provided in order to make debugging (setting joint values)
    // in the Unity Editor easier: it continuously re-applies whichever value
    // field matches the joint type.
    void Update() {
        if (joint_type_ == JointType.Free) {
            UpdateJoint(value_vector_, value_quaternion_);
        } else if (joint_type_ == JointType.Ball) {
            UpdateJoint(value_quaternion_);
        } else {
            UpdateJoint(value_);
        }
    }

    // Map a MuJoCo joint type string (case-insensitive) to a JointType.
    // Null defaults to Hinge; unknown strings warn and fall back to Hinge.
    private static JointType ParseType(string joint_type) {
        if (joint_type == null || "hinge".Equals(joint_type, System.StringComparison.OrdinalIgnoreCase)) {
            return JointType.Hinge;
        } else if ("ball".Equals(joint_type, System.StringComparison.OrdinalIgnoreCase)) {
            return JointType.Ball;
        } else if ("slide".Equals(joint_type, System.StringComparison.OrdinalIgnoreCase)) {
            return JointType.Slide;
        } else if ("free".Equals(joint_type, System.StringComparison.OrdinalIgnoreCase)) {
            return JointType.Free;
        } else {
            Logger.Warning("JointController::ParseType::Unknown joint type: {0}.", joint_type);
            return JointType.Hinge;
        }
    }

    // Set up the joint in the kinematic hierarchy; generates a "joint_N" name
    // for anonymous joints.
    public void Initialize(KineticHierarchyController parent, string joint_name, Vector3 position,
                           Quaternion rotation, Vector3 axis, Vector2 range, string joint_type) {
        if (joint_name == null) {
            joint_name = string.Format("joint_{0}", joint_count_++);
        }
        Initialize(parent, joint_name, position, rotation);
        this.axis_ = axis;
        this.range_ = range;
        this.joint_type_ = ParseType(joint_type);
    }

    // Set a hinge or a slide to a given position, clamped to range limits.
    // Pass radians for hinges. Slides are unitless and depend on configured
    // slide axis.  Warns (and leaves the transform untouched) for ball/free
    // joints.
    public void UpdateJoint(float value) {
        value_ = value;
        if (joint_type_ == JointType.Slide) {
            transform.localPosition = Mathf.Clamp(value_, range_.x, range_.y) * axis_;
        } else if (joint_type_ == JointType.Hinge) {
            transform.localRotation =
                Quaternion.AngleAxis(Mathf.Clamp(value_, range_.x, range_.y) * Mathf.Rad2Deg, axis_);
        } else {
            Logger.Warning("JointController::UpdateJoint::UpdateJoint(float) called for a ball/free joint: {0}.",
                           name);
        }
    }

    // Set a ball joint to a given rotation.  Warns for other joint types.
    public void UpdateJoint(Quaternion value) {
        value_quaternion_ = value;
        if (joint_type_ == JointType.Ball) {
            transform.localRotation = value;
        } else {
            Logger.Warning("JointController::UpdateJoint::UpdateJoint(Quaternion) called for a non ball joint: {0}.",
                           name);
        }
    }

    // Set a free joint's full local pose (position + rotation), unclamped.
    // Warns for other joint types.
    public void UpdateJoint(Vector3 position, Quaternion rotation) {
        value_vector_ = position;
        value_quaternion_ = rotation;
        if (joint_type_ == JointType.Free) {
            transform.localRotation = rotation;
            transform.localPosition = position;
        } else {
            // Fixed a typo in the warning message ("Quaterion" -> "Quaternion").
            Logger.Warning("JointController::UpdateJoint::UpdateJoint(Vector3, Quaternion) called for non-free: {0}.",
                           name);
        }
    }
}
117
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Base class for scene elements that have the parent-child hierarchy,
// e.g.: bodies in bodies, geoms, sites, joints in bodies.
public class KineticHierarchyController : MonoBehaviour {

    // Name this element and attach it under `parent` (when non-null) with the
    // given local pose.  Re-parenting happens before localPosition/localRotation
    // are assigned, so `position` and `rotation` are expressed in the parent's
    // frame.
    public void Initialize(KineticHierarchyController parent, string name, Vector3 position, Quaternion rotation) {
        this.name = name;
        if (parent != null) {
            transform.parent = parent.transform;
        }
        transform.localPosition = position;
        transform.localRotation = rotation;
    }
}
19
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Similar to geoms, sites also can contain primitive geometry.
public class SiteController : GeometricPrimitiveController {

    // Static counter used to generate names for anonymous sites.
    private static int site_count_ = 0;

    // Anonymous sites receive sequential "site_N" names; named sites keep theirs.
    private static string ResolveName(string name) {
        return name == null ? string.Format("site_{0}", site_count_++) : name;
    }

    // Factory: a site containing a box primitive.
    public static SiteController CreateBox(KineticHierarchyController parent, string name, Vector3 position,
                                           Quaternion rotation, Vector3 box_size) {
        string resolved = ResolveName(name);
        SiteController site = SceneUtils.InstantiateWithController<SiteController>(resolved);
        site.InitializeBox(parent, resolved, position, rotation, box_size);
        return site;
    }

    // Factory: a site containing a sphere primitive.
    public static SiteController CreateSphere(KineticHierarchyController parent, string name, Vector3 position,
                                              Quaternion rotation, Vector3 sphere_size) {
        string resolved = ResolveName(name);
        SiteController site = SceneUtils.InstantiateWithController<SiteController>(resolved);
        site.InitializeSphere(parent, resolved, position, rotation, sphere_size);
        return site;
    }

    // Factory: a site containing a plane primitive.
    public static SiteController CreatePlane(KineticHierarchyController parent, string name, Vector3 position,
                                             Quaternion rotation, Vector3 plane_size) {
        string resolved = ResolveName(name);
        SiteController site = SceneUtils.InstantiateWithController<SiteController>(resolved);
        site.InitializePlane(parent, resolved, position, rotation, plane_size);
        return site;
    }

    // Factory: a site containing a cylinder primitive.
    public static SiteController CreateCylinder(KineticHierarchyController parent, string name, Vector3 position,
                                                Quaternion rotation, Vector3 cylinder_size) {
        string resolved = ResolveName(name);
        SiteController site = SceneUtils.InstantiateWithController<SiteController>(resolved);
        site.InitializeCylinder(parent, resolved, position, rotation, cylinder_size);
        return site;
    }

    // Factory: a site containing a capsule (two spheres plus a cylinder).
    public static SiteController CreateCapsule(KineticHierarchyController parent, string name, Vector3 position,
                                               Quaternion rotation, float half_length, float radius) {
        string resolved = ResolveName(name);
        SiteController site = SceneUtils.InstantiateWithController<SiteController>(resolved);
        site.InitializeCapsule(parent, resolved, position, rotation, half_length, radius);
        return site;
    }
}
59
orrb
openai
C#
using System.IO;
using UnityEditor;
using UnityEditor.SceneManagement;
using UnityEngine;

// Editor utility that builds standalone player binaries, either interactively
// from the "GPR/Build" menu or headlessly via "+key value" commandline
// arguments (see BuildCommandline).
public class BuildUtils : EditorWindow {

    public enum SupportedTarget {
        Linux64 = BuildTarget.StandaloneLinux64,
        MacOS64 = BuildTarget.StandaloneOSX,
        All
    };

    private string name_ = "";
    private string version_ = "";
    private SupportedTarget target_ = SupportedTarget.All;
    private string scene_ = "";
    private bool development_build_ = false;

    // Return the value following "+argument" on the commandline, or the default.
    private static string GetArgument(string argument, string default_value) {
        string plus_argument = string.Format("+{0}", argument);
        string[] commandline_args = System.Environment.GetCommandLineArgs();
        for (int i = 0; i < commandline_args.Length - 1; ++i) {
            if (commandline_args[i].Equals(plus_argument)) {
                return commandline_args[i + 1];
            }
        }
        return default_value;
    }

    // Resolve the +target commandline argument into a BuildTarget, defaulting
    // to Linux.  BUGFIX: the default used to be the misspelled "Linux-x64_86",
    // which could never match the "Linux-x86_64" comparison below — the result
    // was only correct because the final fallback also returns Linux.
    private static BuildTarget GetTarget() {
        string target = GetArgument("target", "Linux-x86_64");
        if (target.Equals("Linux-x86_64")) {
            return BuildTarget.StandaloneLinux64;
        } else if (target.Equals("Darwin-x86_64")) {
            return BuildTarget.StandaloneOSX;
        } else {
            // Unknown target strings fall back to Linux.
            return BuildTarget.StandaloneLinux64;
        }
    }

    // Output directory, e.g. "Builds/<name>-Linux-x86_64-<version>/".
    private static string GetLocationDir(string name, BuildTarget target, string version) {
        if (target == BuildTarget.StandaloneLinux64) {
            return string.Format("Builds/{0}-Linux-x86_64-{1}/", name, version);
        } else if (target == BuildTarget.StandaloneOSX) {
            return string.Format("Builds/{0}-Darwin-x86_64-{1}/", name, version);
        } else {
            return string.Format("Builds/{0}-{1}/", name, version);
        }
    }

    // Full path of the produced binary (platform-specific extension).
    private static string GetBinary(string location_dir, string name, BuildTarget target) {
        if (target == BuildTarget.StandaloneLinux64) {
            return string.Format("{0}{1}.x86_64", location_dir, name);
        } else if (target == BuildTarget.StandaloneOSX) {
            return string.Format("{0}{1}.app", location_dir, name);
        } else {
            return string.Format("{0}{1}", location_dir, name);
        }
    }

    // Headless entry point: reads +target, +name, +version, +scene and +devel
    // from the commandline and runs a single build.
    static void BuildCommandline() {
        BuildTarget target = GetTarget();
        string name = GetArgument("name", "StandaloneRenderer");
        string version = GetArgument("version", System.DateTime.Now.ToString("yyyyMMdd"));
        string scene = GetArgument("scene", "Assets/Scenes/StandaloneRenderer.unity");
        string development_build = GetArgument("devel", "false");
        BuildOneTarget(target, scene, name, version, "true".Equals(development_build));
    }

    // Interactive entry point: open a small utility window prefilled from the
    // active scene and today's date.
    [MenuItem("GPR/Build")]
    static void BuildMenu() {
        BuildUtils build_utils = ScriptableObject.CreateInstance<BuildUtils>();
        build_utils.titleContent = new GUIContent("GPR Build Utils");
        build_utils.position = new Rect(Screen.width / 2, Screen.height / 2, 600, 100);
        build_utils.minSize = new Vector2(600, 100);
        build_utils.maxSize = new Vector2(600, 100);
        build_utils.name_ = UnityEngine.SceneManagement.SceneManager.GetActiveScene().name;
        build_utils.scene_ = UnityEngine.SceneManagement.SceneManager.GetActiveScene().path;
        build_utils.version_ = System.DateTime.Now.ToString("yyyyMMdd");
        build_utils.ShowUtility();
    }

    // Build one scene for one concrete target into the versioned output dir.
    private static void BuildOneTarget(BuildTarget target, string scene, string name, string version,
                                       bool development_build) {
        string directory = GetLocationDir(name, target, version);
        string binary = GetBinary(directory, name, target);
        System.Console.WriteLine(string.Format("Building: {0}", binary));
        Directory.CreateDirectory(directory);
        BuildPlayerOptions options = new BuildPlayerOptions();
        if (development_build) {
            options.options = BuildOptions.Development;
        } else {
            options.options = BuildOptions.None;
        }
        options.scenes = new string[] { scene };
        options.target = target;
        options.locationPathName = binary;
        BuildPipeline.BuildPlayer(options);
    }

    // Fan out to every concrete target when SupportedTarget.All is requested.
    private static void BuildAndPush(SupportedTarget target, string scene, string name, string version,
                                     bool development_build) {
        if (target == SupportedTarget.All) {
            BuildOneTarget(BuildTarget.StandaloneLinux64, scene, name, version, development_build);
            BuildOneTarget(BuildTarget.StandaloneOSX, scene, name, version, development_build);
        } else {
            BuildOneTarget((BuildTarget)target, scene, name, version, development_build);
        }
    }

    void OnGUI() {
        EditorGUILayout.BeginVertical();
        name_ = EditorGUILayout.TextField("name", name_);
        version_ = EditorGUILayout.TextField("version", version_);
        target_ = (SupportedTarget)EditorGUILayout.EnumPopup("target", target_);
        scene_ = EditorGUILayout.TextField("scene", scene_);
        development_build_ = EditorGUILayout.Toggle("development build", development_build_);
        bool should_build = false;
        if (GUILayout.Button("Build")) {
            should_build = true;
        }
        EditorGUILayout.EndVertical();
        // Build after closing the layout group so GUI state stays consistent.
        if (should_build) {
            BuildAndPush(target_, scene_, name_, version_, development_build_);
        }
    }
}
138
orrb
openai
C#
// <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: orrb/protos/RendererConfig.proto // </auto-generated> #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Orrb { /// <summary>Holder for reflection information generated from orrb/protos/RendererConfig.proto</summary> public static partial class RendererConfigReflection { #region Descriptor /// <summary>File descriptor for orrb/protos/RendererConfig.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static RendererConfigReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "CiBvcnJiL3Byb3Rvcy9SZW5kZXJlckNvbmZpZy5wcm90bxIEb3JyYiIzCgVD", "b2xvchIJCgFyGAEgASgCEgkKAWcYAiABKAISCQoBYhgDIAEoAhIJCgFhGAQg", "ASgCIjgKClF1YXRlcm5pb24SCQoBeBgBIAEoAhIJCgF5GAIgASgCEgkKAXoY", "AyABKAISCQoBdxgEIAEoAiIqCgdWZWN0b3IzEgkKAXgYASABKAISCQoBeRgC", "IAEoAhIJCgF6GAMgASgCIh8KB1ZlY3RvcjISCQoBeBgBIAEoAhIJCgF5GAIg", "ASgCInEKDlJlbmRlcmVyQ29uZmlnEhYKDm1vZGVsX3htbF9wYXRoGAEgASgJ", "EhoKEm1vZGVsX21hcHBpbmdfcGF0aBgCIAEoCRIrCgpjb21wb25lbnRzGAMg", "AygLMhcub3JyYi5SZW5kZXJlckNvbXBvbmVudCJsChFSZW5kZXJlckNvbXBv", "bmVudBIMCgR0eXBlGAEgASgJEgwKBG5hbWUYAiABKAkSDAoEcGF0aBgDIAEo", "CRItCgZjb25maWcYBCABKAsyHS5vcnJiLlJlbmRlcmVyQ29tcG9uZW50Q29u", "ZmlnIp4KChdSZW5kZXJlckNvbXBvbmVudENvbmZpZxJICg5pbnRfcHJvcGVy", "dGllcxgBIAMoCzIwLm9ycmIuUmVuZGVyZXJDb21wb25lbnRDb25maWcuSW50", "UHJvcGVydGllc0VudHJ5EkwKEGZsb2F0X3Byb3BlcnRpZXMYAiADKAsyMi5v", "cnJiLlJlbmRlcmVyQ29tcG9uZW50Q29uZmlnLkZsb2F0UHJvcGVydGllc0Vu", "dHJ5Ek4KEXN0cmluZ19wcm9wZXJ0aWVzGAMgAygLMjMub3JyYi5SZW5kZXJl", "ckNvbXBvbmVudENvbmZpZy5TdHJpbmdQcm9wZXJ0aWVzRW50cnkSSgoPYm9v", "bF9wcm9wZXJ0aWVzGAQgAygLMjEub3JyYi5SZW5kZXJlckNvbXBvbmVudENv", 
"bmZpZy5Cb29sUHJvcGVydGllc0VudHJ5ElYKFXF1YXRlcm5pb25fcHJvcGVy", "dGllcxgFIAMoCzI3Lm9ycmIuUmVuZGVyZXJDb21wb25lbnRDb25maWcuUXVh", "dGVybmlvblByb3BlcnRpZXNFbnRyeRJQChJ2ZWN0b3IzX3Byb3BlcnRpZXMY", "BiADKAsyNC5vcnJiLlJlbmRlcmVyQ29tcG9uZW50Q29uZmlnLlZlY3RvcjNQ", "cm9wZXJ0aWVzRW50cnkSUAoSdmVjdG9yMl9wcm9wZXJ0aWVzGAcgAygLMjQu", "b3JyYi5SZW5kZXJlckNvbXBvbmVudENvbmZpZy5WZWN0b3IyUHJvcGVydGll", "c0VudHJ5EkoKD2VudW1fcHJvcGVydGllcxgIIAMoCzIxLm9ycmIuUmVuZGVy", "ZXJDb21wb25lbnRDb25maWcuRW51bVByb3BlcnRpZXNFbnRyeRJMChBjb2xv", "cl9wcm9wZXJ0aWVzGAkgAygLMjIub3JyYi5SZW5kZXJlckNvbXBvbmVudENv", "bmZpZy5Db2xvclByb3BlcnRpZXNFbnRyeRo0ChJJbnRQcm9wZXJ0aWVzRW50", "cnkSCwoDa2V5GAEgASgJEg0KBXZhbHVlGAIgASgFOgI4ARo2ChRGbG9hdFBy", "b3BlcnRpZXNFbnRyeRILCgNrZXkYASABKAkSDQoFdmFsdWUYAiABKAI6AjgB", "GjcKFVN0cmluZ1Byb3BlcnRpZXNFbnRyeRILCgNrZXkYASABKAkSDQoFdmFs", "dWUYAiABKAk6AjgBGjUKE0Jvb2xQcm9wZXJ0aWVzRW50cnkSCwoDa2V5GAEg", "ASgJEg0KBXZhbHVlGAIgASgIOgI4ARpNChlRdWF0ZXJuaW9uUHJvcGVydGll", "c0VudHJ5EgsKA2tleRgBIAEoCRIfCgV2YWx1ZRgCIAEoCzIQLm9ycmIuUXVh", "dGVybmlvbjoCOAEaRwoWVmVjdG9yM1Byb3BlcnRpZXNFbnRyeRILCgNrZXkY", "ASABKAkSHAoFdmFsdWUYAiABKAsyDS5vcnJiLlZlY3RvcjM6AjgBGkcKFlZl", "Y3RvcjJQcm9wZXJ0aWVzRW50cnkSCwoDa2V5GAEgASgJEhwKBXZhbHVlGAIg", "ASgLMg0ub3JyYi5WZWN0b3IyOgI4ARo1ChNFbnVtUHJvcGVydGllc0VudHJ5", "EgsKA2tleRgBIAEoCRINCgV2YWx1ZRgCIAEoCToCOAEaQwoUQ29sb3JQcm9w", "ZXJ0aWVzRW50cnkSCwoDa2V5GAEgASgJEhoKBXZhbHVlGAIgASgLMgsub3Jy", "Yi5Db2xvcjoCOAFiBnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.Color), global::Orrb.Color.Parser, new[]{ "R", "G", "B", "A" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.Quaternion), global::Orrb.Quaternion.Parser, new[]{ "X", "Y", "Z", "W" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.Vector3), global::Orrb.Vector3.Parser, new[]{ "X", "Y", "Z" }, 
null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.Vector2), global::Orrb.Vector2.Parser, new[]{ "X", "Y" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RendererConfig), global::Orrb.RendererConfig.Parser, new[]{ "ModelXmlPath", "ModelMappingPath", "Components" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RendererComponent), global::Orrb.RendererComponent.Parser, new[]{ "Type", "Name", "Path", "Config" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RendererComponentConfig), global::Orrb.RendererComponentConfig.Parser, new[]{ "IntProperties", "FloatProperties", "StringProperties", "BoolProperties", "QuaternionProperties", "Vector3Properties", "Vector2Properties", "EnumProperties", "ColorProperties" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, }) })); } #endregion } #region Messages public sealed partial class Color : pb::IMessage<Color> { private static readonly pb::MessageParser<Color> _parser = new pb::MessageParser<Color>(() => new Color()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<Color> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Color() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Color(Color other) : this() { r_ = other.r_; g_ = other.g_; b_ = other.b_; a_ = other.a_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Color Clone() { return new Color(this); } /// <summary>Field number for the "r" field.</summary> public const int RFieldNumber = 1; private float r_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float R { get { return r_; } set { r_ = value; } } /// <summary>Field number for the "g" field.</summary> public const int GFieldNumber = 2; private float g_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float G { get { return g_; } set { g_ = value; } } /// <summary>Field number for the "b" field.</summary> public const int BFieldNumber = 3; private float b_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float B { get { return b_; } set { b_ = value; } } /// <summary>Field number for the "a" field.</summary> public const int AFieldNumber = 4; private float a_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float A { get { return a_; } set { a_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as Color); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(Color other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(R, other.R)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(G, other.G)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(B, other.B)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(A, other.A)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (R != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(R); if (G 
!= 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(G); if (B != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(B); if (A != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(A); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (R != 0F) { output.WriteRawTag(13); output.WriteFloat(R); } if (G != 0F) { output.WriteRawTag(21); output.WriteFloat(G); } if (B != 0F) { output.WriteRawTag(29); output.WriteFloat(B); } if (A != 0F) { output.WriteRawTag(37); output.WriteFloat(A); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (R != 0F) { size += 1 + 4; } if (G != 0F) { size += 1 + 4; } if (B != 0F) { size += 1 + 4; } if (A != 0F) { size += 1 + 4; } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(Color other) { if (other == null) { return; } if (other.R != 0F) { R = other.R; } if (other.G != 0F) { G = other.G; } if (other.B != 0F) { B = other.B; } if (other.A != 0F) { A = other.A; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 13: { R = input.ReadFloat(); break; } case 21: { G = input.ReadFloat(); break; } case 29: { B = 
input.ReadFloat(); break; } case 37: { A = input.ReadFloat(); break; } } } } } public sealed partial class Quaternion : pb::IMessage<Quaternion> { private static readonly pb::MessageParser<Quaternion> _parser = new pb::MessageParser<Quaternion>(() => new Quaternion()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<Quaternion> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[1]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Quaternion() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Quaternion(Quaternion other) : this() { x_ = other.x_; y_ = other.y_; z_ = other.z_; w_ = other.w_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Quaternion Clone() { return new Quaternion(this); } /// <summary>Field number for the "x" field.</summary> public const int XFieldNumber = 1; private float x_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float X { get { return x_; } set { x_ = value; } } /// <summary>Field number for the "y" field.</summary> public const int YFieldNumber = 2; private float y_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float Y { get { return y_; } set { y_ = value; } } /// <summary>Field number for the "z" field.</summary> public const int ZFieldNumber = 3; private float z_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float Z { get { return z_; } set { z_ = value; } } /// <summary>Field number for the "w" field.</summary> public const 
int WFieldNumber = 4; private float w_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float W { get { return w_; } set { w_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as Quaternion); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(Quaternion other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(X, other.X)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Y, other.Y)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Z, other.Z)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(W, other.W)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (X != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(X); if (Y != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Y); if (Z != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Z); if (W != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(W); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (X != 0F) { output.WriteRawTag(13); output.WriteFloat(X); } if (Y != 0F) { output.WriteRawTag(21); output.WriteFloat(Y); } if (Z != 0F) { output.WriteRawTag(29); output.WriteFloat(Z); } if (W != 0F) { 
output.WriteRawTag(37); output.WriteFloat(W); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (X != 0F) { size += 1 + 4; } if (Y != 0F) { size += 1 + 4; } if (Z != 0F) { size += 1 + 4; } if (W != 0F) { size += 1 + 4; } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(Quaternion other) { if (other == null) { return; } if (other.X != 0F) { X = other.X; } if (other.Y != 0F) { Y = other.Y; } if (other.Z != 0F) { Z = other.Z; } if (other.W != 0F) { W = other.W; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 13: { X = input.ReadFloat(); break; } case 21: { Y = input.ReadFloat(); break; } case 29: { Z = input.ReadFloat(); break; } case 37: { W = input.ReadFloat(); break; } } } } } public sealed partial class Vector3 : pb::IMessage<Vector3> { private static readonly pb::MessageParser<Vector3> _parser = new pb::MessageParser<Vector3>(() => new Vector3()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<Vector3> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[2]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Vector3() { 
OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Vector3(Vector3 other) : this() { x_ = other.x_; y_ = other.y_; z_ = other.z_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Vector3 Clone() { return new Vector3(this); } /// <summary>Field number for the "x" field.</summary> public const int XFieldNumber = 1; private float x_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float X { get { return x_; } set { x_ = value; } } /// <summary>Field number for the "y" field.</summary> public const int YFieldNumber = 2; private float y_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float Y { get { return y_; } set { y_ = value; } } /// <summary>Field number for the "z" field.</summary> public const int ZFieldNumber = 3; private float z_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float Z { get { return z_; } set { z_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as Vector3); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(Vector3 other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(X, other.X)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Y, other.Y)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Z, other.Z)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (X != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(X); if (Y != 0F) hash ^= 
pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Y); if (Z != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Z); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (X != 0F) { output.WriteRawTag(13); output.WriteFloat(X); } if (Y != 0F) { output.WriteRawTag(21); output.WriteFloat(Y); } if (Z != 0F) { output.WriteRawTag(29); output.WriteFloat(Z); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (X != 0F) { size += 1 + 4; } if (Y != 0F) { size += 1 + 4; } if (Z != 0F) { size += 1 + 4; } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(Vector3 other) { if (other == null) { return; } if (other.X != 0F) { X = other.X; } if (other.Y != 0F) { Y = other.Y; } if (other.Z != 0F) { Z = other.Z; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 13: { X = input.ReadFloat(); break; } case 21: { Y = input.ReadFloat(); break; } case 29: { Z = input.ReadFloat(); break; } } } } } public sealed partial class Vector2 : pb::IMessage<Vector2> { private static readonly pb::MessageParser<Vector2> _parser = new pb::MessageParser<Vector2>(() => new Vector2()); private pb::UnknownFieldSet 
_unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<Vector2> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[3]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Vector2() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Vector2(Vector2 other) : this() { x_ = other.x_; y_ = other.y_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public Vector2 Clone() { return new Vector2(this); } /// <summary>Field number for the "x" field.</summary> public const int XFieldNumber = 1; private float x_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float X { get { return x_; } set { x_ = value; } } /// <summary>Field number for the "y" field.</summary> public const int YFieldNumber = 2; private float y_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public float Y { get { return y_; } set { y_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as Vector2); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(Vector2 other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(X, other.X)) return false; if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Y, other.Y)) return false; return Equals(_unknownFields, other._unknownFields); } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (X != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(X); if (Y != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Y); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (X != 0F) { output.WriteRawTag(13); output.WriteFloat(X); } if (Y != 0F) { output.WriteRawTag(21); output.WriteFloat(Y); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (X != 0F) { size += 1 + 4; } if (Y != 0F) { size += 1 + 4; } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(Vector2 other) { if (other == null) { return; } if (other.X != 0F) { X = other.X; } if (other.Y != 0F) { Y = other.Y; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 13: { X = input.ReadFloat(); break; } case 21: { Y = input.ReadFloat(); break; } } } } } public sealed partial class RendererConfig : pb::IMessage<RendererConfig> { private static readonly pb::MessageParser<RendererConfig> _parser = new pb::MessageParser<RendererConfig>(() => new RendererConfig()); private pb::UnknownFieldSet _unknownFields; 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<RendererConfig> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[4]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererConfig() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererConfig(RendererConfig other) : this() { modelXmlPath_ = other.modelXmlPath_; modelMappingPath_ = other.modelMappingPath_; components_ = other.components_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererConfig Clone() { return new RendererConfig(this); } /// <summary>Field number for the "model_xml_path" field.</summary> public const int ModelXmlPathFieldNumber = 1; private string modelXmlPath_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string ModelXmlPath { get { return modelXmlPath_; } set { modelXmlPath_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "model_mapping_path" field.</summary> public const int ModelMappingPathFieldNumber = 2; private string modelMappingPath_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string ModelMappingPath { get { return modelMappingPath_; } set { modelMappingPath_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "components" field.</summary> public const int ComponentsFieldNumber = 3; private static readonly pb::FieldCodec<global::Orrb.RendererComponent> _repeated_components_codec = pb::FieldCodec.ForMessage(26, 
global::Orrb.RendererComponent.Parser); private readonly pbc::RepeatedField<global::Orrb.RendererComponent> components_ = new pbc::RepeatedField<global::Orrb.RendererComponent>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RendererComponent> Components { get { return components_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as RendererConfig); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(RendererConfig other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (ModelXmlPath != other.ModelXmlPath) return false; if (ModelMappingPath != other.ModelMappingPath) return false; if(!components_.Equals(other.components_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (ModelXmlPath.Length != 0) hash ^= ModelXmlPath.GetHashCode(); if (ModelMappingPath.Length != 0) hash ^= ModelMappingPath.GetHashCode(); hash ^= components_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (ModelXmlPath.Length != 0) { output.WriteRawTag(10); output.WriteString(ModelXmlPath); } if (ModelMappingPath.Length != 0) { output.WriteRawTag(18); output.WriteString(ModelMappingPath); } components_.WriteTo(output, _repeated_components_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (ModelXmlPath.Length != 0) { size += 1 + 
pb::CodedOutputStream.ComputeStringSize(ModelXmlPath); } if (ModelMappingPath.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(ModelMappingPath); } size += components_.CalculateSize(_repeated_components_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(RendererConfig other) { if (other == null) { return; } if (other.ModelXmlPath.Length != 0) { ModelXmlPath = other.ModelXmlPath; } if (other.ModelMappingPath.Length != 0) { ModelMappingPath = other.ModelMappingPath; } components_.Add(other.components_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { ModelXmlPath = input.ReadString(); break; } case 18: { ModelMappingPath = input.ReadString(); break; } case 26: { components_.AddEntriesFrom(input, _repeated_components_codec); break; } } } } } public sealed partial class RendererComponent : pb::IMessage<RendererComponent> { private static readonly pb::MessageParser<RendererComponent> _parser = new pb::MessageParser<RendererComponent>(() => new RendererComponent()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<RendererComponent> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[5]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererComponent() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererComponent(RendererComponent other) : this() { type_ = other.type_; name_ = other.name_; path_ = other.path_; config_ = other.config_ != null ? other.config_.Clone() : null; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererComponent Clone() { return new RendererComponent(this); } /// <summary>Field number for the "type" field.</summary> public const int TypeFieldNumber = 1; private string type_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Type { get { return type_; } set { type_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 2; private string name_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "path" field.</summary> public const int PathFieldNumber = 3; private string path_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Path { get { return path_; } set { path_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "config" field.</summary> public const int ConfigFieldNumber = 4; private global::Orrb.RendererComponentConfig config_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public global::Orrb.RendererComponentConfig Config { get { return config_; } set { config_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as RendererComponent); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public 
bool Equals(RendererComponent other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Type != other.Type) return false; if (Name != other.Name) return false; if (Path != other.Path) return false; if (!object.Equals(Config, other.Config)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (Type.Length != 0) hash ^= Type.GetHashCode(); if (Name.Length != 0) hash ^= Name.GetHashCode(); if (Path.Length != 0) hash ^= Path.GetHashCode(); if (config_ != null) hash ^= Config.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (Type.Length != 0) { output.WriteRawTag(10); output.WriteString(Type); } if (Name.Length != 0) { output.WriteRawTag(18); output.WriteString(Name); } if (Path.Length != 0) { output.WriteRawTag(26); output.WriteString(Path); } if (config_ != null) { output.WriteRawTag(34); output.WriteMessage(Config); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (Type.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Type); } if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } if (Path.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Path); } if (config_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(Config); } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void 
MergeFrom(RendererComponent other) { if (other == null) { return; } if (other.Type.Length != 0) { Type = other.Type; } if (other.Name.Length != 0) { Name = other.Name; } if (other.Path.Length != 0) { Path = other.Path; } if (other.config_ != null) { if (config_ == null) { Config = new global::Orrb.RendererComponentConfig(); } Config.MergeFrom(other.Config); } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Type = input.ReadString(); break; } case 18: { Name = input.ReadString(); break; } case 26: { Path = input.ReadString(); break; } case 34: { if (config_ == null) { Config = new global::Orrb.RendererComponentConfig(); } input.ReadMessage(Config); break; } } } } } public sealed partial class RendererComponentConfig : pb::IMessage<RendererComponentConfig> { private static readonly pb::MessageParser<RendererComponentConfig> _parser = new pb::MessageParser<RendererComponentConfig>(() => new RendererComponentConfig()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<RendererComponentConfig> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RendererConfigReflection.Descriptor.MessageTypes[6]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererComponentConfig() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public 
RendererComponentConfig(RendererComponentConfig other) : this() { intProperties_ = other.intProperties_.Clone(); floatProperties_ = other.floatProperties_.Clone(); stringProperties_ = other.stringProperties_.Clone(); boolProperties_ = other.boolProperties_.Clone(); quaternionProperties_ = other.quaternionProperties_.Clone(); vector3Properties_ = other.vector3Properties_.Clone(); vector2Properties_ = other.vector2Properties_.Clone(); enumProperties_ = other.enumProperties_.Clone(); colorProperties_ = other.colorProperties_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RendererComponentConfig Clone() { return new RendererComponentConfig(this); } /// <summary>Field number for the "int_properties" field.</summary> public const int IntPropertiesFieldNumber = 1; private static readonly pbc::MapField<string, int>.Codec _map_intProperties_codec = new pbc::MapField<string, int>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForInt32(16), 10); private readonly pbc::MapField<string, int> intProperties_ = new pbc::MapField<string, int>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, int> IntProperties { get { return intProperties_; } } /// <summary>Field number for the "float_properties" field.</summary> public const int FloatPropertiesFieldNumber = 2; private static readonly pbc::MapField<string, float>.Codec _map_floatProperties_codec = new pbc::MapField<string, float>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForFloat(21), 18); private readonly pbc::MapField<string, float> floatProperties_ = new pbc::MapField<string, float>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, float> FloatProperties { get { return floatProperties_; } } /// <summary>Field number for the "string_properties" field.</summary> public const int StringPropertiesFieldNumber = 3; private static readonly 
pbc::MapField<string, string>.Codec _map_stringProperties_codec = new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 26); private readonly pbc::MapField<string, string> stringProperties_ = new pbc::MapField<string, string>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, string> StringProperties { get { return stringProperties_; } } /// <summary>Field number for the "bool_properties" field.</summary> public const int BoolPropertiesFieldNumber = 4; private static readonly pbc::MapField<string, bool>.Codec _map_boolProperties_codec = new pbc::MapField<string, bool>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForBool(16), 34); private readonly pbc::MapField<string, bool> boolProperties_ = new pbc::MapField<string, bool>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, bool> BoolProperties { get { return boolProperties_; } } /// <summary>Field number for the "quaternion_properties" field.</summary> public const int QuaternionPropertiesFieldNumber = 5; private static readonly pbc::MapField<string, global::Orrb.Quaternion>.Codec _map_quaternionProperties_codec = new pbc::MapField<string, global::Orrb.Quaternion>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Orrb.Quaternion.Parser), 42); private readonly pbc::MapField<string, global::Orrb.Quaternion> quaternionProperties_ = new pbc::MapField<string, global::Orrb.Quaternion>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, global::Orrb.Quaternion> QuaternionProperties { get { return quaternionProperties_; } } /// <summary>Field number for the "vector3_properties" field.</summary> public const int Vector3PropertiesFieldNumber = 6; private static readonly pbc::MapField<string, global::Orrb.Vector3>.Codec _map_vector3Properties_codec = new pbc::MapField<string, global::Orrb.Vector3>.Codec(pb::FieldCodec.ForString(10), 
pb::FieldCodec.ForMessage(18, global::Orrb.Vector3.Parser), 50); private readonly pbc::MapField<string, global::Orrb.Vector3> vector3Properties_ = new pbc::MapField<string, global::Orrb.Vector3>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, global::Orrb.Vector3> Vector3Properties { get { return vector3Properties_; } } /// <summary>Field number for the "vector2_properties" field.</summary> public const int Vector2PropertiesFieldNumber = 7; private static readonly pbc::MapField<string, global::Orrb.Vector2>.Codec _map_vector2Properties_codec = new pbc::MapField<string, global::Orrb.Vector2>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Orrb.Vector2.Parser), 58); private readonly pbc::MapField<string, global::Orrb.Vector2> vector2Properties_ = new pbc::MapField<string, global::Orrb.Vector2>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, global::Orrb.Vector2> Vector2Properties { get { return vector2Properties_; } } /// <summary>Field number for the "enum_properties" field.</summary> public const int EnumPropertiesFieldNumber = 8; private static readonly pbc::MapField<string, string>.Codec _map_enumProperties_codec = new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 66); private readonly pbc::MapField<string, string> enumProperties_ = new pbc::MapField<string, string>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, string> EnumProperties { get { return enumProperties_; } } /// <summary>Field number for the "color_properties" field.</summary> public const int ColorPropertiesFieldNumber = 9; private static readonly pbc::MapField<string, global::Orrb.Color>.Codec _map_colorProperties_codec = new pbc::MapField<string, global::Orrb.Color>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Orrb.Color.Parser), 74); private readonly 
pbc::MapField<string, global::Orrb.Color> colorProperties_ = new pbc::MapField<string, global::Orrb.Color>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::MapField<string, global::Orrb.Color> ColorProperties { get { return colorProperties_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as RendererComponentConfig); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(RendererComponentConfig other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (!IntProperties.Equals(other.IntProperties)) return false; if (!FloatProperties.Equals(other.FloatProperties)) return false; if (!StringProperties.Equals(other.StringProperties)) return false; if (!BoolProperties.Equals(other.BoolProperties)) return false; if (!QuaternionProperties.Equals(other.QuaternionProperties)) return false; if (!Vector3Properties.Equals(other.Vector3Properties)) return false; if (!Vector2Properties.Equals(other.Vector2Properties)) return false; if (!EnumProperties.Equals(other.EnumProperties)) return false; if (!ColorProperties.Equals(other.ColorProperties)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; hash ^= IntProperties.GetHashCode(); hash ^= FloatProperties.GetHashCode(); hash ^= StringProperties.GetHashCode(); hash ^= BoolProperties.GetHashCode(); hash ^= QuaternionProperties.GetHashCode(); hash ^= Vector3Properties.GetHashCode(); hash ^= Vector2Properties.GetHashCode(); hash ^= EnumProperties.GetHashCode(); hash ^= ColorProperties.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { intProperties_.WriteTo(output, _map_intProperties_codec); floatProperties_.WriteTo(output, _map_floatProperties_codec); stringProperties_.WriteTo(output, _map_stringProperties_codec); boolProperties_.WriteTo(output, _map_boolProperties_codec); quaternionProperties_.WriteTo(output, _map_quaternionProperties_codec); vector3Properties_.WriteTo(output, _map_vector3Properties_codec); vector2Properties_.WriteTo(output, _map_vector2Properties_codec); enumProperties_.WriteTo(output, _map_enumProperties_codec); colorProperties_.WriteTo(output, _map_colorProperties_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; size += intProperties_.CalculateSize(_map_intProperties_codec); size += floatProperties_.CalculateSize(_map_floatProperties_codec); size += stringProperties_.CalculateSize(_map_stringProperties_codec); size += boolProperties_.CalculateSize(_map_boolProperties_codec); size += quaternionProperties_.CalculateSize(_map_quaternionProperties_codec); size += vector3Properties_.CalculateSize(_map_vector3Properties_codec); size += vector2Properties_.CalculateSize(_map_vector2Properties_codec); size += enumProperties_.CalculateSize(_map_enumProperties_codec); size += colorProperties_.CalculateSize(_map_colorProperties_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(RendererComponentConfig other) { if (other == null) { return; } intProperties_.Add(other.intProperties_); floatProperties_.Add(other.floatProperties_); stringProperties_.Add(other.stringProperties_); boolProperties_.Add(other.boolProperties_); quaternionProperties_.Add(other.quaternionProperties_); vector3Properties_.Add(other.vector3Properties_); 
vector2Properties_.Add(other.vector2Properties_); enumProperties_.Add(other.enumProperties_); colorProperties_.Add(other.colorProperties_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { intProperties_.AddEntriesFrom(input, _map_intProperties_codec); break; } case 18: { floatProperties_.AddEntriesFrom(input, _map_floatProperties_codec); break; } case 26: { stringProperties_.AddEntriesFrom(input, _map_stringProperties_codec); break; } case 34: { boolProperties_.AddEntriesFrom(input, _map_boolProperties_codec); break; } case 42: { quaternionProperties_.AddEntriesFrom(input, _map_quaternionProperties_codec); break; } case 50: { vector3Properties_.AddEntriesFrom(input, _map_vector3Properties_codec); break; } case 58: { vector2Properties_.AddEntriesFrom(input, _map_vector2Properties_codec); break; } case 66: { enumProperties_.AddEntriesFrom(input, _map_enumProperties_codec); break; } case 74: { colorProperties_.AddEntriesFrom(input, _map_colorProperties_codec); break; } } } } } #endregion } #endregion Designer generated code
1,533
orrb
openai
C#
// <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: orrb/protos/RenderService.proto // </auto-generated> #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Orrb { /// <summary>Holder for reflection information generated from orrb/protos/RenderService.proto</summary> public static partial class RenderServiceReflection { #region Descriptor /// <summary>File descriptor for orrb/protos/RenderService.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static RenderServiceReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Ch9vcnJiL3Byb3Rvcy9SZW5kZXJTZXJ2aWNlLnByb3RvEgRvcnJiGiBvcnJi", "L3Byb3Rvcy9SZW5kZXJlckNvbmZpZy5wcm90byLXAgoSUmVuZGVyQmF0Y2hS", "ZXF1ZXN0EjsKB2VudHJpZXMYASADKAsyKi5vcnJiLlJlbmRlckJhdGNoUmVx", "dWVzdC5CYXRjaFJlcXVlc3RFbnRyeRINCgV3aWR0aBgCIAEoBRIOCgZoZWln", "aHQYAyABKAUSEAoIc2NlbmVfaWQYBCABKAUSFAoMY2FtZXJhX25hbWVzGAUg", "AygJEhIKCmJhdGNoX3NlZWQYBiABKAUSFwoPdXNlX2VudHJ5X3NlZWRzGAcg", "ASgIEhQKDHJlbmRlcl9hbHBoYRgIIAEoCBIUCgxyZW5kZXJfZGVwdGgYCSAB", "KAgSFgoOcmVuZGVyX25vcm1hbHMYCiABKAgSGwoTcmVuZGVyX3NlZ21lbnRh", "dGlvbhgLIAEoCBovChFCYXRjaFJlcXVlc3RFbnRyeRIMCgRxcG9zGAEgAygC", "EgwKBHNlZWQYAiABKAUiyQUKE1JlbmRlckJhdGNoUmVzcG9uc2USNgoHc3Ry", "ZWFtcxgBIAMoCzIlLm9ycmIuUmVuZGVyQmF0Y2hSZXNwb25zZS5TdHJlYW1F", "bnRyeRJSChZhdXhpbGlhcnlfYm9vbF9zdHJlYW1zGAIgAygLMjIub3JyYi5S", "ZW5kZXJCYXRjaFJlc3BvbnNlLkF1eGlsaWFyeUJvb2xTdHJlYW1FbnRyeRJQ", "ChVhdXhpbGlhcnlfaW50X3N0cmVhbXMYAyADKAsyMS5vcnJiLlJlbmRlckJh", "dGNoUmVzcG9uc2UuQXV4aWxpYXJ5SW50U3RyZWFtRW50cnkSVAoXYXV4aWxp", "YXJ5X2Zsb2F0X3N0cmVhbXMYBCADKAsyMy5vcnJiLlJlbmRlckJhdGNoUmVz", "cG9uc2UuQXV4aWxpYXJ5RmxvYXRTdHJlYW1FbnRyeRrVAQoLU3RyZWFtRW50", 
"cnkSDAoEbmFtZRgBIAEoCRJJCgdlbnRyaWVzGAIgAygLMjgub3JyYi5SZW5k", "ZXJCYXRjaFJlc3BvbnNlLlN0cmVhbUVudHJ5LkJhdGNoUmVzcG9uc2VFbnRy", "eRptChJCYXRjaFJlc3BvbnNlRW50cnkSEgoKaW1hZ2VfZGF0YRgBIAEoDBIS", "CgpkZXB0aF9kYXRhGAIgASgMEhQKDG5vcm1hbHNfZGF0YRgDIAEoDBIZChFz", "ZWdtZW50YXRpb25fZGF0YRgEIAEoDBo2ChhBdXhpbGlhcnlCb29sU3RyZWFt", "RW50cnkSDAoEbmFtZRgBIAEoCRIMCgRkYXRhGAIgAygIGjUKF0F1eGlsaWFy", "eUludFN0cmVhbUVudHJ5EgwKBG5hbWUYASABKAkSDAoEZGF0YRgCIAMoBRo3", "ChlBdXhpbGlhcnlGbG9hdFN0cmVhbUVudHJ5EgwKBG5hbWUYASABKAkSDAoE", "ZGF0YRgCIAMoAiI8Cg1VcGRhdGVSZXF1ZXN0EisKCmNvbXBvbmVudHMYASAD", "KAsyFy5vcnJiLlJlbmRlcmVyQ29tcG9uZW50IiAKDlVwZGF0ZVJlc3BvbnNl", "Eg4KBmVycm9ycxgBIAMoCTKMAQoNUmVuZGVyU2VydmljZRJECgtSZW5kZXJC", "YXRjaBIYLm9ycmIuUmVuZGVyQmF0Y2hSZXF1ZXN0Ghkub3JyYi5SZW5kZXJC", "YXRjaFJlc3BvbnNlIgASNQoGVXBkYXRlEhMub3JyYi5VcGRhdGVSZXF1ZXN0", "GhQub3JyYi5VcGRhdGVSZXNwb25zZSIAYgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::Orrb.RendererConfigReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchRequest), global::Orrb.RenderBatchRequest.Parser, new[]{ "Entries", "Width", "Height", "SceneId", "CameraNames", "BatchSeed", "UseEntrySeeds", "RenderAlpha", "RenderDepth", "RenderNormals", "RenderSegmentation" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchRequest.Types.BatchRequestEntry), global::Orrb.RenderBatchRequest.Types.BatchRequestEntry.Parser, new[]{ "Qpos", "Seed" }, null, null, null)}), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchResponse), global::Orrb.RenderBatchResponse.Parser, new[]{ "Streams", "AuxiliaryBoolStreams", "AuxiliaryIntStreams", "AuxiliaryFloatStreams" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchResponse.Types.StreamEntry), 
global::Orrb.RenderBatchResponse.Types.StreamEntry.Parser, new[]{ "Name", "Entries" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry), global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry.Parser, new[]{ "ImageData", "DepthData", "NormalsData", "SegmentationData" }, null, null, null)}), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry), global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry.Parser, new[]{ "Name", "Data" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry), global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry.Parser, new[]{ "Name", "Data" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry), global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry.Parser, new[]{ "Name", "Data" }, null, null, null)}), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.UpdateRequest), global::Orrb.UpdateRequest.Parser, new[]{ "Components" }, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Orrb.UpdateResponse), global::Orrb.UpdateResponse.Parser, new[]{ "Errors" }, null, null, null) })); } #endregion } #region Messages public sealed partial class RenderBatchRequest : pb::IMessage<RenderBatchRequest> { private static readonly pb::MessageParser<RenderBatchRequest> _parser = new pb::MessageParser<RenderBatchRequest>(() => new RenderBatchRequest()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<RenderBatchRequest> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return 
global::Orrb.RenderServiceReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RenderBatchRequest() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RenderBatchRequest(RenderBatchRequest other) : this() { entries_ = other.entries_.Clone(); width_ = other.width_; height_ = other.height_; sceneId_ = other.sceneId_; cameraNames_ = other.cameraNames_.Clone(); batchSeed_ = other.batchSeed_; useEntrySeeds_ = other.useEntrySeeds_; renderAlpha_ = other.renderAlpha_; renderDepth_ = other.renderDepth_; renderNormals_ = other.renderNormals_; renderSegmentation_ = other.renderSegmentation_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RenderBatchRequest Clone() { return new RenderBatchRequest(this); } /// <summary>Field number for the "entries" field.</summary> public const int EntriesFieldNumber = 1; private static readonly pb::FieldCodec<global::Orrb.RenderBatchRequest.Types.BatchRequestEntry> _repeated_entries_codec = pb::FieldCodec.ForMessage(10, global::Orrb.RenderBatchRequest.Types.BatchRequestEntry.Parser); private readonly pbc::RepeatedField<global::Orrb.RenderBatchRequest.Types.BatchRequestEntry> entries_ = new pbc::RepeatedField<global::Orrb.RenderBatchRequest.Types.BatchRequestEntry>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RenderBatchRequest.Types.BatchRequestEntry> Entries { get { return entries_; } } /// <summary>Field number for the "width" field.</summary> public const int WidthFieldNumber = 2; private int width_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int Width { get { return width_; } set { width_ = value; } } /// <summary>Field 
number for the "height" field.</summary> public const int HeightFieldNumber = 3; private int height_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int Height { get { return height_; } set { height_ = value; } } /// <summary>Field number for the "scene_id" field.</summary> public const int SceneIdFieldNumber = 4; private int sceneId_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int SceneId { get { return sceneId_; } set { sceneId_ = value; } } /// <summary>Field number for the "camera_names" field.</summary> public const int CameraNamesFieldNumber = 5; private static readonly pb::FieldCodec<string> _repeated_cameraNames_codec = pb::FieldCodec.ForString(42); private readonly pbc::RepeatedField<string> cameraNames_ = new pbc::RepeatedField<string>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<string> CameraNames { get { return cameraNames_; } } /// <summary>Field number for the "batch_seed" field.</summary> public const int BatchSeedFieldNumber = 6; private int batchSeed_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int BatchSeed { get { return batchSeed_; } set { batchSeed_ = value; } } /// <summary>Field number for the "use_entry_seeds" field.</summary> public const int UseEntrySeedsFieldNumber = 7; private bool useEntrySeeds_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool UseEntrySeeds { get { return useEntrySeeds_; } set { useEntrySeeds_ = value; } } /// <summary>Field number for the "render_alpha" field.</summary> public const int RenderAlphaFieldNumber = 8; private bool renderAlpha_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool RenderAlpha { get { return renderAlpha_; } set { renderAlpha_ = value; } } /// <summary>Field number for the "render_depth" field.</summary> public const int RenderDepthFieldNumber = 9; private bool renderDepth_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool 
RenderDepth { get { return renderDepth_; } set { renderDepth_ = value; } } /// <summary>Field number for the "render_normals" field.</summary> public const int RenderNormalsFieldNumber = 10; private bool renderNormals_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool RenderNormals { get { return renderNormals_; } set { renderNormals_ = value; } } /// <summary>Field number for the "render_segmentation" field.</summary> public const int RenderSegmentationFieldNumber = 11; private bool renderSegmentation_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool RenderSegmentation { get { return renderSegmentation_; } set { renderSegmentation_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as RenderBatchRequest); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(RenderBatchRequest other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if(!entries_.Equals(other.entries_)) return false; if (Width != other.Width) return false; if (Height != other.Height) return false; if (SceneId != other.SceneId) return false; if(!cameraNames_.Equals(other.cameraNames_)) return false; if (BatchSeed != other.BatchSeed) return false; if (UseEntrySeeds != other.UseEntrySeeds) return false; if (RenderAlpha != other.RenderAlpha) return false; if (RenderDepth != other.RenderDepth) return false; if (RenderNormals != other.RenderNormals) return false; if (RenderSegmentation != other.RenderSegmentation) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; hash ^= entries_.GetHashCode(); if (Width != 0) hash ^= Width.GetHashCode(); if (Height != 0) hash ^= Height.GetHashCode(); if (SceneId != 0) hash ^= SceneId.GetHashCode(); hash ^= cameraNames_.GetHashCode(); if 
(BatchSeed != 0) hash ^= BatchSeed.GetHashCode(); if (UseEntrySeeds != false) hash ^= UseEntrySeeds.GetHashCode(); if (RenderAlpha != false) hash ^= RenderAlpha.GetHashCode(); if (RenderDepth != false) hash ^= RenderDepth.GetHashCode(); if (RenderNormals != false) hash ^= RenderNormals.GetHashCode(); if (RenderSegmentation != false) hash ^= RenderSegmentation.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { entries_.WriteTo(output, _repeated_entries_codec); if (Width != 0) { output.WriteRawTag(16); output.WriteInt32(Width); } if (Height != 0) { output.WriteRawTag(24); output.WriteInt32(Height); } if (SceneId != 0) { output.WriteRawTag(32); output.WriteInt32(SceneId); } cameraNames_.WriteTo(output, _repeated_cameraNames_codec); if (BatchSeed != 0) { output.WriteRawTag(48); output.WriteInt32(BatchSeed); } if (UseEntrySeeds != false) { output.WriteRawTag(56); output.WriteBool(UseEntrySeeds); } if (RenderAlpha != false) { output.WriteRawTag(64); output.WriteBool(RenderAlpha); } if (RenderDepth != false) { output.WriteRawTag(72); output.WriteBool(RenderDepth); } if (RenderNormals != false) { output.WriteRawTag(80); output.WriteBool(RenderNormals); } if (RenderSegmentation != false) { output.WriteRawTag(88); output.WriteBool(RenderSegmentation); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; size += entries_.CalculateSize(_repeated_entries_codec); if (Width != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Width); } if (Height != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Height); } if (SceneId != 0) { size += 1 + 
pb::CodedOutputStream.ComputeInt32Size(SceneId); } size += cameraNames_.CalculateSize(_repeated_cameraNames_codec); if (BatchSeed != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(BatchSeed); } if (UseEntrySeeds != false) { size += 1 + 1; } if (RenderAlpha != false) { size += 1 + 1; } if (RenderDepth != false) { size += 1 + 1; } if (RenderNormals != false) { size += 1 + 1; } if (RenderSegmentation != false) { size += 1 + 1; } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(RenderBatchRequest other) { if (other == null) { return; } entries_.Add(other.entries_); if (other.Width != 0) { Width = other.Width; } if (other.Height != 0) { Height = other.Height; } if (other.SceneId != 0) { SceneId = other.SceneId; } cameraNames_.Add(other.cameraNames_); if (other.BatchSeed != 0) { BatchSeed = other.BatchSeed; } if (other.UseEntrySeeds != false) { UseEntrySeeds = other.UseEntrySeeds; } if (other.RenderAlpha != false) { RenderAlpha = other.RenderAlpha; } if (other.RenderDepth != false) { RenderDepth = other.RenderDepth; } if (other.RenderNormals != false) { RenderNormals = other.RenderNormals; } if (other.RenderSegmentation != false) { RenderSegmentation = other.RenderSegmentation; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { entries_.AddEntriesFrom(input, _repeated_entries_codec); break; } case 16: { Width = input.ReadInt32(); break; } case 24: { Height = input.ReadInt32(); break; } case 32: { SceneId = input.ReadInt32(); break; } case 42: { cameraNames_.AddEntriesFrom(input, _repeated_cameraNames_codec); break; } case 48: { BatchSeed 
= input.ReadInt32(); break; } case 56: { UseEntrySeeds = input.ReadBool(); break; } case 64: { RenderAlpha = input.ReadBool(); break; } case 72: { RenderDepth = input.ReadBool(); break; } case 80: { RenderNormals = input.ReadBool(); break; } case 88: { RenderSegmentation = input.ReadBool(); break; } } } } #region Nested types /// <summary>Container for nested types declared in the RenderBatchRequest message type.</summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static partial class Types { public sealed partial class BatchRequestEntry : pb::IMessage<BatchRequestEntry> { private static readonly pb::MessageParser<BatchRequestEntry> _parser = new pb::MessageParser<BatchRequestEntry>(() => new BatchRequestEntry()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<BatchRequestEntry> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderBatchRequest.Descriptor.NestedTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public BatchRequestEntry() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public BatchRequestEntry(BatchRequestEntry other) : this() { qpos_ = other.qpos_.Clone(); seed_ = other.seed_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public BatchRequestEntry Clone() { return new BatchRequestEntry(this); } /// <summary>Field number for the "qpos" field.</summary> public const int QposFieldNumber = 1; private static readonly pb::FieldCodec<float> _repeated_qpos_codec = pb::FieldCodec.ForFloat(10); private readonly pbc::RepeatedField<float> 
qpos_ = new pbc::RepeatedField<float>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<float> Qpos { get { return qpos_; } } /// <summary>Field number for the "seed" field.</summary> public const int SeedFieldNumber = 2; private int seed_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int Seed { get { return seed_; } set { seed_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as BatchRequestEntry); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(BatchRequestEntry other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if(!qpos_.Equals(other.qpos_)) return false; if (Seed != other.Seed) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; hash ^= qpos_.GetHashCode(); if (Seed != 0) hash ^= Seed.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { qpos_.WriteTo(output, _repeated_qpos_codec); if (Seed != 0) { output.WriteRawTag(16); output.WriteInt32(Seed); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; size += qpos_.CalculateSize(_repeated_qpos_codec); if (Seed != 0) { size += 1 + pb::CodedOutputStream.ComputeInt32Size(Seed); } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void 
MergeFrom(BatchRequestEntry other) { if (other == null) { return; } qpos_.Add(other.qpos_); if (other.Seed != 0) { Seed = other.Seed; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: case 13: { qpos_.AddEntriesFrom(input, _repeated_qpos_codec); break; } case 16: { Seed = input.ReadInt32(); break; } } } } } } #endregion } public sealed partial class RenderBatchResponse : pb::IMessage<RenderBatchResponse> { private static readonly pb::MessageParser<RenderBatchResponse> _parser = new pb::MessageParser<RenderBatchResponse>(() => new RenderBatchResponse()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<RenderBatchResponse> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderServiceReflection.Descriptor.MessageTypes[1]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RenderBatchResponse() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public RenderBatchResponse(RenderBatchResponse other) : this() { streams_ = other.streams_.Clone(); auxiliaryBoolStreams_ = other.auxiliaryBoolStreams_.Clone(); auxiliaryIntStreams_ = other.auxiliaryIntStreams_.Clone(); auxiliaryFloatStreams_ = other.auxiliaryFloatStreams_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] 
public RenderBatchResponse Clone() { return new RenderBatchResponse(this); } /// <summary>Field number for the "streams" field.</summary> public const int StreamsFieldNumber = 1; private static readonly pb::FieldCodec<global::Orrb.RenderBatchResponse.Types.StreamEntry> _repeated_streams_codec = pb::FieldCodec.ForMessage(10, global::Orrb.RenderBatchResponse.Types.StreamEntry.Parser); private readonly pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.StreamEntry> streams_ = new pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.StreamEntry>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.StreamEntry> Streams { get { return streams_; } } /// <summary>Field number for the "auxiliary_bool_streams" field.</summary> public const int AuxiliaryBoolStreamsFieldNumber = 2; private static readonly pb::FieldCodec<global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry> _repeated_auxiliaryBoolStreams_codec = pb::FieldCodec.ForMessage(18, global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry.Parser); private readonly pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry> auxiliaryBoolStreams_ = new pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryBoolStreamEntry> AuxiliaryBoolStreams { get { return auxiliaryBoolStreams_; } } /// <summary>Field number for the "auxiliary_int_streams" field.</summary> public const int AuxiliaryIntStreamsFieldNumber = 3; private static readonly pb::FieldCodec<global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry> _repeated_auxiliaryIntStreams_codec = pb::FieldCodec.ForMessage(26, global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry.Parser); private readonly 
pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry> auxiliaryIntStreams_ = new pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryIntStreamEntry> AuxiliaryIntStreams { get { return auxiliaryIntStreams_; } } /// <summary>Field number for the "auxiliary_float_streams" field.</summary> public const int AuxiliaryFloatStreamsFieldNumber = 4; private static readonly pb::FieldCodec<global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry> _repeated_auxiliaryFloatStreams_codec = pb::FieldCodec.ForMessage(34, global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry.Parser); private readonly pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry> auxiliaryFloatStreams_ = new pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.AuxiliaryFloatStreamEntry> AuxiliaryFloatStreams { get { return auxiliaryFloatStreams_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as RenderBatchResponse); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(RenderBatchResponse other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if(!streams_.Equals(other.streams_)) return false; if(!auxiliaryBoolStreams_.Equals(other.auxiliaryBoolStreams_)) return false; if(!auxiliaryIntStreams_.Equals(other.auxiliaryIntStreams_)) return false; if(!auxiliaryFloatStreams_.Equals(other.auxiliaryFloatStreams_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int 
GetHashCode() { int hash = 1; hash ^= streams_.GetHashCode(); hash ^= auxiliaryBoolStreams_.GetHashCode(); hash ^= auxiliaryIntStreams_.GetHashCode(); hash ^= auxiliaryFloatStreams_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { streams_.WriteTo(output, _repeated_streams_codec); auxiliaryBoolStreams_.WriteTo(output, _repeated_auxiliaryBoolStreams_codec); auxiliaryIntStreams_.WriteTo(output, _repeated_auxiliaryIntStreams_codec); auxiliaryFloatStreams_.WriteTo(output, _repeated_auxiliaryFloatStreams_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; size += streams_.CalculateSize(_repeated_streams_codec); size += auxiliaryBoolStreams_.CalculateSize(_repeated_auxiliaryBoolStreams_codec); size += auxiliaryIntStreams_.CalculateSize(_repeated_auxiliaryIntStreams_codec); size += auxiliaryFloatStreams_.CalculateSize(_repeated_auxiliaryFloatStreams_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(RenderBatchResponse other) { if (other == null) { return; } streams_.Add(other.streams_); auxiliaryBoolStreams_.Add(other.auxiliaryBoolStreams_); auxiliaryIntStreams_.Add(other.auxiliaryIntStreams_); auxiliaryFloatStreams_.Add(other.auxiliaryFloatStreams_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: 
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { streams_.AddEntriesFrom(input, _repeated_streams_codec); break; } case 18: { auxiliaryBoolStreams_.AddEntriesFrom(input, _repeated_auxiliaryBoolStreams_codec); break; } case 26: { auxiliaryIntStreams_.AddEntriesFrom(input, _repeated_auxiliaryIntStreams_codec); break; } case 34: { auxiliaryFloatStreams_.AddEntriesFrom(input, _repeated_auxiliaryFloatStreams_codec); break; } } } } #region Nested types /// <summary>Container for nested types declared in the RenderBatchResponse message type.</summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static partial class Types { public sealed partial class StreamEntry : pb::IMessage<StreamEntry> { private static readonly pb::MessageParser<StreamEntry> _parser = new pb::MessageParser<StreamEntry>(() => new StreamEntry()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<StreamEntry> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderBatchResponse.Descriptor.NestedTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public StreamEntry() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public StreamEntry(StreamEntry other) : this() { name_ = other.name_; entries_ = other.entries_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public StreamEntry Clone() { return new StreamEntry(this); } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 1; private string name_ = ""; 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "entries" field.</summary> public const int EntriesFieldNumber = 2; private static readonly pb::FieldCodec<global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry> _repeated_entries_codec = pb::FieldCodec.ForMessage(18, global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry.Parser); private readonly pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry> entries_ = new pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RenderBatchResponse.Types.StreamEntry.Types.BatchResponseEntry> Entries { get { return entries_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as StreamEntry); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(StreamEntry other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Name != other.Name) return false; if(!entries_.Equals(other.entries_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (Name.Length != 0) hash ^= Name.GetHashCode(); hash ^= entries_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (Name.Length != 0) { 
output.WriteRawTag(10); output.WriteString(Name); } entries_.WriteTo(output, _repeated_entries_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } size += entries_.CalculateSize(_repeated_entries_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(StreamEntry other) { if (other == null) { return; } if (other.Name.Length != 0) { Name = other.Name; } entries_.Add(other.entries_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Name = input.ReadString(); break; } case 18: { entries_.AddEntriesFrom(input, _repeated_entries_codec); break; } } } } #region Nested types /// <summary>Container for nested types declared in the StreamEntry message type.</summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static partial class Types { public sealed partial class BatchResponseEntry : pb::IMessage<BatchResponseEntry> { private static readonly pb::MessageParser<BatchResponseEntry> _parser = new pb::MessageParser<BatchResponseEntry>(() => new BatchResponseEntry()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<BatchResponseEntry> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return 
global::Orrb.RenderBatchResponse.Types.StreamEntry.Descriptor.NestedTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public BatchResponseEntry() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public BatchResponseEntry(BatchResponseEntry other) : this() { imageData_ = other.imageData_; depthData_ = other.depthData_; normalsData_ = other.normalsData_; segmentationData_ = other.segmentationData_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public BatchResponseEntry Clone() { return new BatchResponseEntry(this); } /// <summary>Field number for the "image_data" field.</summary> public const int ImageDataFieldNumber = 1; private pb::ByteString imageData_ = pb::ByteString.Empty; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pb::ByteString ImageData { get { return imageData_; } set { imageData_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "depth_data" field.</summary> public const int DepthDataFieldNumber = 2; private pb::ByteString depthData_ = pb::ByteString.Empty; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pb::ByteString DepthData { get { return depthData_; } set { depthData_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "normals_data" field.</summary> public const int NormalsDataFieldNumber = 3; private pb::ByteString normalsData_ = pb::ByteString.Empty; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pb::ByteString NormalsData { get { return normalsData_; } set { normalsData_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "segmentation_data" field.</summary> public const 
int SegmentationDataFieldNumber = 4; private pb::ByteString segmentationData_ = pb::ByteString.Empty; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pb::ByteString SegmentationData { get { return segmentationData_; } set { segmentationData_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as BatchResponseEntry); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(BatchResponseEntry other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (ImageData != other.ImageData) return false; if (DepthData != other.DepthData) return false; if (NormalsData != other.NormalsData) return false; if (SegmentationData != other.SegmentationData) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (ImageData.Length != 0) hash ^= ImageData.GetHashCode(); if (DepthData.Length != 0) hash ^= DepthData.GetHashCode(); if (NormalsData.Length != 0) hash ^= NormalsData.GetHashCode(); if (SegmentationData.Length != 0) hash ^= SegmentationData.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (ImageData.Length != 0) { output.WriteRawTag(10); output.WriteBytes(ImageData); } if (DepthData.Length != 0) { output.WriteRawTag(18); output.WriteBytes(DepthData); } if (NormalsData.Length != 0) { output.WriteRawTag(26); output.WriteBytes(NormalsData); } if (SegmentationData.Length != 0) { output.WriteRawTag(34); 
output.WriteBytes(SegmentationData); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (ImageData.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeBytesSize(ImageData); } if (DepthData.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeBytesSize(DepthData); } if (NormalsData.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeBytesSize(NormalsData); } if (SegmentationData.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeBytesSize(SegmentationData); } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(BatchResponseEntry other) { if (other == null) { return; } if (other.ImageData.Length != 0) { ImageData = other.ImageData; } if (other.DepthData.Length != 0) { DepthData = other.DepthData; } if (other.NormalsData.Length != 0) { NormalsData = other.NormalsData; } if (other.SegmentationData.Length != 0) { SegmentationData = other.SegmentationData; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { ImageData = input.ReadBytes(); break; } case 18: { DepthData = input.ReadBytes(); break; } case 26: { NormalsData = input.ReadBytes(); break; } case 34: { SegmentationData = input.ReadBytes(); break; } } } } } } #endregion } public sealed partial class AuxiliaryBoolStreamEntry : pb::IMessage<AuxiliaryBoolStreamEntry> { private static readonly pb::MessageParser<AuxiliaryBoolStreamEntry> _parser = new pb::MessageParser<AuxiliaryBoolStreamEntry>(() => new AuxiliaryBoolStreamEntry()); private pb::UnknownFieldSet 
_unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<AuxiliaryBoolStreamEntry> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderBatchResponse.Descriptor.NestedTypes[1]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryBoolStreamEntry() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryBoolStreamEntry(AuxiliaryBoolStreamEntry other) : this() { name_ = other.name_; data_ = other.data_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryBoolStreamEntry Clone() { return new AuxiliaryBoolStreamEntry(this); } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 1; private string name_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "data" field.</summary> public const int DataFieldNumber = 2; private static readonly pb::FieldCodec<bool> _repeated_data_codec = pb::FieldCodec.ForBool(18); private readonly pbc::RepeatedField<bool> data_ = new pbc::RepeatedField<bool>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<bool> Data { get { return data_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as AuxiliaryBoolStreamEntry); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(AuxiliaryBoolStreamEntry other) { if 
(ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Name != other.Name) return false; if(!data_.Equals(other.data_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (Name.Length != 0) hash ^= Name.GetHashCode(); hash ^= data_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (Name.Length != 0) { output.WriteRawTag(10); output.WriteString(Name); } data_.WriteTo(output, _repeated_data_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } size += data_.CalculateSize(_repeated_data_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(AuxiliaryBoolStreamEntry other) { if (other == null) { return; } if (other.Name.Length != 0) { Name = other.Name; } data_.Add(other.data_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Name = input.ReadString(); break; } case 18: case 16: { data_.AddEntriesFrom(input, _repeated_data_codec); break; } } } } } public sealed 
partial class AuxiliaryIntStreamEntry : pb::IMessage<AuxiliaryIntStreamEntry> { private static readonly pb::MessageParser<AuxiliaryIntStreamEntry> _parser = new pb::MessageParser<AuxiliaryIntStreamEntry>(() => new AuxiliaryIntStreamEntry()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<AuxiliaryIntStreamEntry> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderBatchResponse.Descriptor.NestedTypes[2]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryIntStreamEntry() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryIntStreamEntry(AuxiliaryIntStreamEntry other) : this() { name_ = other.name_; data_ = other.data_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryIntStreamEntry Clone() { return new AuxiliaryIntStreamEntry(this); } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 1; private string name_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "data" field.</summary> public const int DataFieldNumber = 2; private static readonly pb::FieldCodec<int> _repeated_data_codec = pb::FieldCodec.ForInt32(18); private readonly pbc::RepeatedField<int> data_ = new pbc::RepeatedField<int>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<int> Data { get { return data_; } } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as AuxiliaryIntStreamEntry); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(AuxiliaryIntStreamEntry other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Name != other.Name) return false; if(!data_.Equals(other.data_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (Name.Length != 0) hash ^= Name.GetHashCode(); hash ^= data_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (Name.Length != 0) { output.WriteRawTag(10); output.WriteString(Name); } data_.WriteTo(output, _repeated_data_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } size += data_.CalculateSize(_repeated_data_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(AuxiliaryIntStreamEntry other) { if (other == null) { return; } if (other.Name.Length != 0) { Name = other.Name; } data_.Add(other.data_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = 
input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Name = input.ReadString(); break; } case 18: case 16: { data_.AddEntriesFrom(input, _repeated_data_codec); break; } } } } } public sealed partial class AuxiliaryFloatStreamEntry : pb::IMessage<AuxiliaryFloatStreamEntry> { private static readonly pb::MessageParser<AuxiliaryFloatStreamEntry> _parser = new pb::MessageParser<AuxiliaryFloatStreamEntry>(() => new AuxiliaryFloatStreamEntry()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<AuxiliaryFloatStreamEntry> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderBatchResponse.Descriptor.NestedTypes[3]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryFloatStreamEntry() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryFloatStreamEntry(AuxiliaryFloatStreamEntry other) : this() { name_ = other.name_; data_ = other.data_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AuxiliaryFloatStreamEntry Clone() { return new AuxiliaryFloatStreamEntry(this); } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 1; private string name_ = ""; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "data" field.</summary> public const int DataFieldNumber = 2; private 
static readonly pb::FieldCodec<float> _repeated_data_codec = pb::FieldCodec.ForFloat(18); private readonly pbc::RepeatedField<float> data_ = new pbc::RepeatedField<float>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<float> Data { get { return data_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as AuxiliaryFloatStreamEntry); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(AuxiliaryFloatStreamEntry other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Name != other.Name) return false; if(!data_.Equals(other.data_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; if (Name.Length != 0) hash ^= Name.GetHashCode(); hash ^= data_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { if (Name.Length != 0) { output.WriteRawTag(10); output.WriteString(Name); } data_.WriteTo(output, _repeated_data_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } size += data_.CalculateSize(_repeated_data_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(AuxiliaryFloatStreamEntry other) { if (other == null) { return; } if 
(other.Name.Length != 0) { Name = other.Name; } data_.Add(other.data_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Name = input.ReadString(); break; } case 18: case 21: { data_.AddEntriesFrom(input, _repeated_data_codec); break; } } } } } } #endregion } public sealed partial class UpdateRequest : pb::IMessage<UpdateRequest> { private static readonly pb::MessageParser<UpdateRequest> _parser = new pb::MessageParser<UpdateRequest>(() => new UpdateRequest()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<UpdateRequest> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderServiceReflection.Descriptor.MessageTypes[2]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public UpdateRequest() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public UpdateRequest(UpdateRequest other) : this() { components_ = other.components_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public UpdateRequest Clone() { return new UpdateRequest(this); } /// <summary>Field number for the "components" field.</summary> public const int ComponentsFieldNumber = 1; private static readonly pb::FieldCodec<global::Orrb.RendererComponent> _repeated_components_codec = 
pb::FieldCodec.ForMessage(10, global::Orrb.RendererComponent.Parser); private readonly pbc::RepeatedField<global::Orrb.RendererComponent> components_ = new pbc::RepeatedField<global::Orrb.RendererComponent>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<global::Orrb.RendererComponent> Components { get { return components_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as UpdateRequest); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(UpdateRequest other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if(!components_.Equals(other.components_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; hash ^= components_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { components_.WriteTo(output, _repeated_components_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; size += components_.CalculateSize(_repeated_components_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(UpdateRequest other) { if (other == null) { return; } components_.Add(other.components_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { components_.AddEntriesFrom(input, _repeated_components_codec); break; } } } } } public sealed partial class UpdateResponse : pb::IMessage<UpdateResponse> { private static readonly pb::MessageParser<UpdateResponse> _parser = new pb::MessageParser<UpdateResponse>(() => new UpdateResponse()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pb::MessageParser<UpdateResponse> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { get { return global::Orrb.RenderServiceReflection.Descriptor.MessageTypes[3]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public UpdateResponse() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public UpdateResponse(UpdateResponse other) : this() { errors_ = other.errors_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public UpdateResponse Clone() { return new UpdateResponse(this); } /// <summary>Field number for the "errors" field.</summary> public const int ErrorsFieldNumber = 1; private static readonly pb::FieldCodec<string> _repeated_errors_codec = pb::FieldCodec.ForString(10); private readonly pbc::RepeatedField<string> errors_ = new pbc::RepeatedField<string>(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public pbc::RepeatedField<string> Errors { get { return errors_; } } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as UpdateResponse); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public bool Equals(UpdateResponse other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if(!errors_.Equals(other.errors_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; hash ^= errors_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { errors_.WriteTo(output, _repeated_errors_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; size += errors_.CalculateSize(_repeated_errors_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(UpdateResponse other) { if (other == null) { return; } errors_.Add(other.errors_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { errors_.AddEntriesFrom(input, _repeated_errors_codec); break; } } } } } #endregion } #endregion Designer generated code
1,878
orrb
openai
C#
// <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: orrb/protos/RenderService.proto // </auto-generated> #pragma warning disable 0414, 1591 #region Designer generated code using grpc = global::Grpc.Core; namespace Orrb { public static partial class RenderService { static readonly string __ServiceName = "orrb.RenderService"; static readonly grpc::Marshaller<global::Orrb.RenderBatchRequest> __Marshaller_orrb_RenderBatchRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Orrb.RenderBatchRequest.Parser.ParseFrom); static readonly grpc::Marshaller<global::Orrb.RenderBatchResponse> __Marshaller_orrb_RenderBatchResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Orrb.RenderBatchResponse.Parser.ParseFrom); static readonly grpc::Marshaller<global::Orrb.UpdateRequest> __Marshaller_orrb_UpdateRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Orrb.UpdateRequest.Parser.ParseFrom); static readonly grpc::Marshaller<global::Orrb.UpdateResponse> __Marshaller_orrb_UpdateResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Orrb.UpdateResponse.Parser.ParseFrom); static readonly grpc::Method<global::Orrb.RenderBatchRequest, global::Orrb.RenderBatchResponse> __Method_RenderBatch = new grpc::Method<global::Orrb.RenderBatchRequest, global::Orrb.RenderBatchResponse>( grpc::MethodType.Unary, __ServiceName, "RenderBatch", __Marshaller_orrb_RenderBatchRequest, __Marshaller_orrb_RenderBatchResponse); static readonly grpc::Method<global::Orrb.UpdateRequest, global::Orrb.UpdateResponse> __Method_Update = new grpc::Method<global::Orrb.UpdateRequest, global::Orrb.UpdateResponse>( grpc::MethodType.Unary, __ServiceName, "Update", __Marshaller_orrb_UpdateRequest, __Marshaller_orrb_UpdateResponse); /// <summary>Service 
descriptor</summary> public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor { get { return global::Orrb.RenderServiceReflection.Descriptor.Services[0]; } } /// <summary>Base class for server-side implementations of RenderService</summary> public abstract partial class RenderServiceBase { public virtual global::System.Threading.Tasks.Task<global::Orrb.RenderBatchResponse> RenderBatch(global::Orrb.RenderBatchRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } public virtual global::System.Threading.Tasks.Task<global::Orrb.UpdateResponse> Update(global::Orrb.UpdateRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } } /// <summary>Client for RenderService</summary> public partial class RenderServiceClient : grpc::ClientBase<RenderServiceClient> { /// <summary>Creates a new client for RenderService</summary> /// <param name="channel">The channel to use to make remote calls.</param> public RenderServiceClient(grpc::Channel channel) : base(channel) { } /// <summary>Creates a new client for RenderService that uses a custom <c>CallInvoker</c>.</summary> /// <param name="callInvoker">The callInvoker to use to make remote calls.</param> public RenderServiceClient(grpc::CallInvoker callInvoker) : base(callInvoker) { } /// <summary>Protected parameterless constructor to allow creation of test doubles.</summary> protected RenderServiceClient() : base() { } /// <summary>Protected constructor to allow creation of configured clients.</summary> /// <param name="configuration">The client configuration.</param> protected RenderServiceClient(ClientBaseConfiguration configuration) : base(configuration) { } public virtual global::Orrb.RenderBatchResponse RenderBatch(global::Orrb.RenderBatchRequest request, grpc::Metadata headers = null, global::System.DateTime? 
deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return RenderBatch(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::Orrb.RenderBatchResponse RenderBatch(global::Orrb.RenderBatchRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_RenderBatch, null, options, request); } public virtual grpc::AsyncUnaryCall<global::Orrb.RenderBatchResponse> RenderBatchAsync(global::Orrb.RenderBatchRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return RenderBatchAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::Orrb.RenderBatchResponse> RenderBatchAsync(global::Orrb.RenderBatchRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_RenderBatch, null, options, request); } public virtual global::Orrb.UpdateResponse Update(global::Orrb.UpdateRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return Update(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::Orrb.UpdateResponse Update(global::Orrb.UpdateRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_Update, null, options, request); } public virtual grpc::AsyncUnaryCall<global::Orrb.UpdateResponse> UpdateAsync(global::Orrb.UpdateRequest request, grpc::Metadata headers = null, global::System.DateTime? 
deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return UpdateAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::Orrb.UpdateResponse> UpdateAsync(global::Orrb.UpdateRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_Update, null, options, request); } /// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary> protected override RenderServiceClient NewInstance(ClientBaseConfiguration configuration) { return new RenderServiceClient(configuration); } } /// <summary>Creates service definition that can be registered with a server</summary> /// <param name="serviceImpl">An object implementing the server-side handling logic.</param> public static grpc::ServerServiceDefinition BindService(RenderServiceBase serviceImpl) { return grpc::ServerServiceDefinition.CreateBuilder() .AddMethod(__Method_RenderBatch, serviceImpl.RenderBatch) .AddMethod(__Method_Update, serviceImpl.Update).Build(); } } } #endregion
129
orrb
openai
C#
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
using Google.Protobuf;
using UnityEngine;

// This is the main part of the standalone renderer. This script loads
// the scene, sets up the server or the interactive mode.
public class InteractiveLogic : MonoBehaviour {

    // Main-loop lifecycle states. Only Init and MainLoop are ever assigned
    // in this file; Failed/Finished appear unused here.
    enum State {
        Init,
        MainLoop,
        Failed,
        Finished
    };

    // Server: headless rendering driven by GRPC requests.
    // Interactive: on-screen UI with manual camera switching.
    public enum Mode {
        Interactive,
        Server
    };

    [SerializeField]
    public SceneManager scene_manager_ = null;

    [SerializeField]
    public RenderServer render_server_ = null;

    [SerializeField]
    public Recorder recorder_ = null;

    [SerializeField]
    [Flag]
    public Mode mode_ = Mode.Interactive;

    // The renderer components config file location.
    [SerializeField]
    [Flag]
    public string renderer_config_path_ = null;

    // The MuJoCo XML scene file location.
    [SerializeField]
    [Flag]
    public string model_xml_path_ = null;

    // The joint name to qpos index mapping file location.
    [SerializeField]
    [Flag]
    public string model_mapping_path_ = null;

    // The state data (qposes and images) location for interactive mode.
    [SerializeField]
    [Flag]
    public string model_state_path_ = null;

    // Assets basedir, used to resolve relative resource paths.
    [SerializeField]
    [Flag]
    public string asset_basedir_ = ".";

    // Process id of the parent process. Used to shutdown when parent is dead.
    [SerializeField]
    [Flag]
    public int parent_pid_ = -1;

    private State current_state_ = State.Init;
    private SceneInstance local_scene_instance_ = null;
    private List<Camera> scene_cameras_ = new List<Camera>();
    // Index into scene_cameras_ of the single enabled camera (interactive mode).
    private int current_camera_ = 0;
    // IMGUI panel toggles (interactive mode only).
    private bool show_ui_ = true;
    private bool anchor_left_ = true;
    private string config_save_path_ = "config.json";

    // Resolve flag-provided paths relative to the asset basedir. Runs once
    // before the first Update().
    void Start() {
        Flags.InitFlags(this, "main");
        renderer_config_path_ = ConfigUtils.ResolveFile(asset_basedir_, renderer_config_path_);
        model_mapping_path_ = ConfigUtils.ResolveFile(asset_basedir_, model_mapping_path_);
        model_state_path_ = ConfigUtils.ResolveFile(asset_basedir_, model_state_path_);
        // Default save target sits next to the loaded config ("<config>.new").
        config_save_path_ = string.Format("{0}.new", renderer_config_path_);
    }

    // Per-frame state machine: the first frame performs one-time scene /
    // server initialization, every later frame pumps the main loop.
    void Update() {
        switch (current_state_) {
            case State.Init:
                current_state_ = State.MainLoop;
                local_scene_instance_ = scene_manager_.CreateSceneInstance();
                Orrb.RendererConfig renderer_config = LoadConfig(renderer_config_path_);
                // If the renderer config contains model and mapping paths, use them.
                if (renderer_config.ModelXmlPath.Length > 0) {
                    model_xml_path_ = renderer_config.ModelXmlPath;
                }
                if (renderer_config.ModelMappingPath.Length > 0) {
                    model_mapping_path_ = renderer_config.ModelMappingPath;
                }
                local_scene_instance_.Initialize(model_xml_path_, model_mapping_path_, asset_basedir_);
                foreach (Orrb.RendererComponent renderer_component in renderer_config.Components) {
                    local_scene_instance_.GetComponentManager().AddComponent(
                        renderer_component.Type, renderer_component.Name, renderer_component.Path,
                        renderer_component.Config, mode_ == Mode.Server // Enable by default in server mode.
                    );
                }
                if (mode_ == Mode.Server) {
                    // In server mode: resize the useless window, start the GRPC
                    // server and turn on capture.
                    Screen.SetResolution(50, 50, false);
                    render_server_.Initialize(recorder_, local_scene_instance_);
                    recorder_.Initialize(render_server_);
                } else {
                    // In interactive mode: load state from files, set up active
                    // cameras.
                    local_scene_instance_.GetStateLoader().InitializeStateStream(model_state_path_);
                    scene_cameras_ = local_scene_instance_.GetCameras();
                    ToggleCamera(current_camera_);
                }
                if (parent_pid_ != -1) {
                    // If parent pid was provided start a parent watchdog coroutine.
                    StartCoroutine(ParentProcessWatch());
                }
                break;
            case State.MainLoop:
            default:
                if (mode_ == Mode.Server) {
                    render_server_.ProcessRequests();
                } else {
                    local_scene_instance_.GetComponentManager().RunComponents(
                        new RendererComponent.NullOutputContext());
                }
                break;
        }
    }

    // This coroutine keeps track of the parent process. If the parent is
    // dead it will stop the standalone renderer. Polls every 3 seconds of
    // real (unscaled) time.
    private IEnumerator ParentProcessWatch() {
        Process parent = null;
        try {
            parent = Process.GetProcessById(parent_pid_);
        } catch (Exception e) {
            Logger.Error("Exception geting parent process: {0}, {1}.", parent_pid_, e.Message);
            Application.Quit();
        }
        if (parent == null) {
            Logger.Error("Parent process is null: {0}.", parent_pid_);
            Application.Quit();
        }
        Logger.Info("Starting parent process watchdog: {0}.", parent_pid_);
        while (true) {
            yield return new WaitForSecondsRealtime(3);
            if (parent.HasExited) {
                Logger.Info("Parent has exited, quitting.");
                Application.Quit();
                yield break;
            }
        }
    }

    // Parse a RendererConfig protobuf from a JSON file on disk.
    private Orrb.RendererConfig LoadConfig(string path) {
        return Orrb.RendererConfig.Parser.ParseJson(File.ReadAllText(path));
    }

    // Parse a RendererComponentConfig from a JSON string; single quotes are
    // accepted and rewritten to double quotes before parsing.
    private static Orrb.RendererComponentConfig ParseConfig(string config) {
        return Orrb.RendererComponentConfig.Parser.ParseJson(config.Replace('\'', '"'));
    }

    // Loop through cameras, disable all but one selected.
    private void ToggleCamera(int new_camera) {
        foreach (Camera scene_camera in scene_cameras_) {
            scene_camera.enabled = false;
        }
        current_camera_ = new_camera;
        scene_cameras_[current_camera_].enabled = true;
    }

    // Make the previous camera active, in interactive mode.
    private void PreviousCamera() {
        if (scene_cameras_.Count == 0) {
            return;
        }
        // Add Count before the modulo so the index never goes negative.
        current_camera_ = (current_camera_ + scene_cameras_.Count - 1) % scene_cameras_.Count;
        ToggleCamera(current_camera_);
    }

    // Make the next camera active, in interactive mode.
    private void NextCamera() {
        if (scene_cameras_.Count == 0) {
            return;
        }
        current_camera_ = (current_camera_ + 1) % scene_cameras_.Count;
        ToggleCamera(current_camera_);
    }

    // Immediate-mode GUI for interactive mode: a collapsible panel anchored
    // left or right with config saving, camera switching and per-component
    // editor widgets. No-op in server mode or before initialization.
    void OnGUI() {
        if (current_state_ != State.MainLoop || mode_ == Mode.Server) {
            return;
        }
        GUI.skin.toggle.fontSize = 12;
        GUI.skin.label.fontSize = 12;
        local_scene_instance_.GetComponentManager().DrawSceneGUI();
        local_scene_instance_.GetStateLoader().DrawSceneGUI();
        GUILayout.BeginArea(new Rect(anchor_left_ ? 10 : Screen.width - 410, 10, 400, Screen.height - 20));
        GUILayout.BeginVertical();
        GUILayout.BeginHorizontal();
        if (!anchor_left_) {
            GUILayout.FlexibleSpace();
        }
        // "-"/"+" collapses or expands the panel, ">"/"<" flips the anchor side.
        if (GUILayout.Button(show_ui_ ? "-" : "+", GUILayout.Width(20))) {
            show_ui_ = !show_ui_;
        }
        if (GUILayout.Button(anchor_left_ ? ">" : "<", GUILayout.Width(20))) {
            anchor_left_ = !anchor_left_;
        }
        if (anchor_left_) {
            GUILayout.FlexibleSpace();
        }
        GUILayout.EndHorizontal();
        GUILayout.Space(3);
        if (show_ui_) {
            GUILayout.BeginHorizontal();
            config_save_path_ = GUILayout.TextField(config_save_path_, GUILayout.Width(310));
            if (GUILayout.Button("Save config", GUILayout.Width(80))) {
                // Snapshot the live component configuration to JSON.
                Orrb.RendererConfig config = local_scene_instance_.GetComponentManager().GetConfig();
                JsonFormatter formatter = new JsonFormatter(JsonFormatter.Settings.Default);
                File.WriteAllText(config_save_path_, formatter.Format(config));
            }
            GUILayout.EndHorizontal();
            GUILayout.Space(3);
            GUILayout.BeginHorizontal();
            if (GUILayout.Button(" < ", GUILayout.Width(20))) {
                PreviousCamera();
            }
            if (GUILayout.Button(" > ", GUILayout.Width(20))) {
                NextCamera();
            }
            GUILayout.Space(10);
            GUILayout.Label(scene_cameras_.Count > current_camera_ ?
                            scene_cameras_[current_camera_].name : "-", GUILayout.Width(300));
            GUILayout.FlexibleSpace();
            GUILayout.EndHorizontal();
            GUILayout.Space(3);
            local_scene_instance_.GetStateLoader().DrawEditorGUI();
            GUILayout.Space(3);
            local_scene_instance_.GetComponentManager().DrawEditorGUI();
        }
        GUILayout.FlexibleSpace();
        GUILayout.EndVertical();
        GUILayout.EndArea();
    }
}
282
orrb
openai
C#
using System.Text;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using UnityEngine;
using System.Linq;

namespace Parabox.STL
{
    /**
     * Describes the file format of an STL file.
     */
    public enum FileType
    {
        Ascii,
        Binary
    };

    /**
     * Export STL files from Unity mesh assets.
     */
    public static class pb_Stl
    {
        /**
         * Write a mesh file to STL.
         */
        public static bool WriteFile(string path, Mesh mesh, FileType type = FileType.Ascii, bool convertToRightHandedCoordinates = true)
        {
            return WriteFile(path, new Mesh[] { mesh }, type, convertToRightHandedCoordinates);
        }

        /**
         * Write a collection of mesh assets to an STL file.
         * No transformations are performed on meshes in this method.
         * Eg, if you want to export a set of a meshes in a transform
         * hierarchy the meshes should be transformed prior to this call.
         *
         * string path - Where to write the file.
         * IList<Mesh> meshes - The mesh assets to write.
         * FileType type - How to format the file (in ASCII or binary).
         *
         * Returns false (and logs the exception) on any I/O or mesh error.
         */
        public static bool WriteFile(string path, IList<Mesh> meshes, FileType type = FileType.Ascii, bool convertToRightHandedCoordinates = true)
        {
            try
            {
                switch(type)
                {
                    case FileType.Binary:
                    {
                        // http://paulbourke.net/dataformats/stl/
                        // http://www.fabbers.com/tech/STL_Format
                        using (BinaryWriter writer = new BinaryWriter(File.Open(path, FileMode.Create), new ASCIIEncoding()))
                        {
                            // 80 byte header
                            writer.Write(new byte[80]);

                            uint totalTriangleCount = (uint) (meshes.Sum(x => x.triangles.Length) / 3);

                            // unsigned long facet count (4 bytes)
                            writer.Write( totalTriangleCount );

                            foreach(Mesh mesh in meshes)
                            {
                                Vector3[] v = convertToRightHandedCoordinates ? Left2Right(mesh.vertices) : mesh.vertices;
                                Vector3[] n = convertToRightHandedCoordinates ? Left2Right(mesh.normals) : mesh.normals;

                                // mesh.triangles returns a copy, so reversing it
                                // here does not mutate the source mesh. Reversing
                                // the whole index array flips the winding of every
                                // triangle, matching the handedness conversion.
                                int[] t = mesh.triangles;
                                int triangleCount = t.Length;

                                if(convertToRightHandedCoordinates)
                                    System.Array.Reverse(t);

                                for(int i = 0; i < triangleCount; i += 3)
                                {
                                    int a = t[i], b = t[i+1], c = t[i+2];

                                    Vector3 avg = AvgNrm(n[a], n[b], n[c]);

                                    writer.Write(avg.x);
                                    writer.Write(avg.y);
                                    writer.Write(avg.z);

                                    writer.Write(v[a].x);
                                    writer.Write(v[a].y);
                                    writer.Write(v[a].z);

                                    writer.Write(v[b].x);
                                    writer.Write(v[b].y);
                                    writer.Write(v[b].z);

                                    writer.Write(v[c].x);
                                    writer.Write(v[c].y);
                                    writer.Write(v[c].z);

                                    // specification says attribute byte count should be set to 0.
                                    writer.Write( (ushort)0 );
                                }
                            }
                        }
                    }
                    break;

                    default:
                        string model = WriteString(meshes);
                        File.WriteAllText(path, model);
                        break;
                }
            }
            catch(System.Exception e)
            {
                UnityEngine.Debug.LogError(e.ToString());
                return false;
            }

            return true;
        }

        /**
         * Write a Unity mesh to an ASCII STL string.
         */
        public static string WriteString(Mesh mesh, bool convertToRightHandedCoordinates = true)
        {
            return WriteString(new Mesh[] { mesh }, convertToRightHandedCoordinates);
        }

        /**
         * Write a set of meshes to an ASCII string in STL format.
         *
         * Numbers are formatted with the invariant culture: the STL spec
         * requires '.' as the decimal separator, so formatting with the
         * current culture would emit invalid files on comma-decimal locales.
         */
        public static string WriteString(IList<Mesh> meshes, bool convertToRightHandedCoordinates = true)
        {
            StringBuilder sb = new StringBuilder();

            string name = meshes.Count == 1 ? meshes[0].name : "Composite Mesh";

            sb.AppendLine(string.Format(CultureInfo.InvariantCulture, "solid {0}", name));

            foreach(Mesh mesh in meshes)
            {
                Vector3[] v = convertToRightHandedCoordinates ? Left2Right(mesh.vertices) : mesh.vertices;
                Vector3[] n = convertToRightHandedCoordinates ? Left2Right(mesh.normals) : mesh.normals;
                // mesh.triangles returns a copy; reversing is safe (see WriteFile).
                int[] t = mesh.triangles;
                if(convertToRightHandedCoordinates)
                    System.Array.Reverse(t);
                int triLen = t.Length;

                for(int i = 0; i < triLen; i+=3)
                {
                    int a = t[i];
                    int b = t[i+1];
                    int c = t[i+2];

                    Vector3 nrm = AvgNrm(n[a], n[b], n[c]);

                    sb.AppendLine(string.Format(CultureInfo.InvariantCulture, "facet normal {0} {1} {2}", nrm.x, nrm.y, nrm.z));

                    sb.AppendLine("outer loop");

                    sb.AppendLine(string.Format(CultureInfo.InvariantCulture, "\tvertex {0} {1} {2}", v[a].x, v[a].y, v[a].z));
                    sb.AppendLine(string.Format(CultureInfo.InvariantCulture, "\tvertex {0} {1} {2}", v[b].x, v[b].y, v[b].z));
                    sb.AppendLine(string.Format(CultureInfo.InvariantCulture, "\tvertex {0} {1} {2}", v[c].x, v[c].y, v[c].z));

                    sb.AppendLine("endloop");
                    sb.AppendLine("endfacet");
                }
            }

            sb.AppendLine(string.Format(CultureInfo.InvariantCulture, "endsolid {0}", name));

            return sb.ToString();
        }

        /**
         * Mirror an array of points across the Z axis (left-handed Unity
         * space to right-handed STL space). Returns a new array.
         */
        private static Vector3[] Left2Right(Vector3[] v)
        {
            Matrix4x4 l2r = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1f, 1f, -1f));
            Vector3[] r = new Vector3[v.Length];
            for(int i = 0; i < v.Length; i++)
                r[i] = l2r.MultiplyPoint3x4(v[i]);

            return r;
        }

        /**
         * Average of 3 vectors (component-wise; not re-normalized).
         */
        private static Vector3 AvgNrm(Vector3 a, Vector3 b, Vector3 c)
        {
            return new Vector3(
                (a.x + b.x + c.x) / 3f,
                (a.y + b.y + c.y) / 3f,
                (a.z + b.z + c.z) / 3f );
        }
    }
}
192
orrb
openai
C#
using UnityEngine;
using System.Linq;
using System.Collections.Generic;

namespace Parabox.STL
{
    /**
     * Provides menu items for writing STL files from a scene selection.
     */
    public static class pb_Stl_Exporter
    {
        /**
         * Export a hierarchy of GameObjects to path with file type.
         * Returns true on success; false when there is nothing to export,
         * the path is empty, or the write fails. The temporary meshes
         * created for the export are always destroyed before returning.
         */
        public static bool Export(string path, GameObject[] gameObjects, FileType type)
        {
            Mesh[] meshes = CreateWorldSpaceMeshesWithTransforms(gameObjects.Select(x => x.transform).ToArray());
            bool success = false;

            if(meshes != null && meshes.Length > 0)
            {
                if(!string.IsNullOrEmpty(path))
                    success = pb_Stl.WriteFile(path, meshes, type);
            }

            // Meshes are runtime-created copies; destroy them to avoid leaks.
            for(int i = 0; meshes != null && i < meshes.Length; i++)
                Object.DestroyImmediate(meshes[i]);

            return success;
        }

        /**
         * Extracts a list of mesh values with their relative transformations intact.
         * Works on clones of the selection parented under a temporary ROOT object,
         * so the original scene objects are never modified. Returns null when the
         * input is null or empty.
         */
        private static Mesh[] CreateWorldSpaceMeshesWithTransforms(IList<Transform> transforms)
        {
            if(transforms == null || transforms.Count < 1)
                return null;

            // move root node to center of selection
            Vector3 p = Vector3.zero;

            for(int i = 0; i < transforms.Count; i++)
                p += transforms[i].position;

            Vector3 mesh_center = p / (float) transforms.Count;

            GameObject root = new GameObject();
            root.name = "ROOT";
            root.transform.position = mesh_center;

            // copy all transforms to new root gameobject
            foreach(Transform t in transforms)
            {
                GameObject go = (GameObject) GameObject.Instantiate(t.gameObject);
                // First restore the clone's local pose under its original parent,
                // then reparent to ROOT keeping the world pose — this preserves
                // each object's placement relative to the selection center.
                go.transform.SetParent(t.parent, false);
                go.transform.SetParent(root.transform, true);
            }

            // move root to 0,0,0 so mesh transformations are relative to origin
            root.transform.position = Vector3.zero;

            // create new meshes by iterating the root node and transforming vertex & normal
            // values (ignoring all other mesh attributes since STL doesn't care about them)
            List<MeshFilter> mfs = root.GetComponentsInChildren<MeshFilter>().Where(x => x.sharedMesh != null).ToList();
            int meshCount = mfs.Count;
            Mesh[] meshes = new Mesh[meshCount];

            for(int i = 0; i < meshCount; i++)
            {
                Transform t = mfs[i].transform;

                // sharedMesh.vertices/normals return copies, so baking the
                // transform into these arrays does not touch the shared asset.
                Vector3[] v = mfs[i].sharedMesh.vertices;
                Vector3[] n = mfs[i].sharedMesh.normals;

                for(int it = 0; it < v.Length; it++)
                {
                    v[it] = t.TransformPoint(v[it]);
                    n[it] = t.TransformDirection(n[it]);
                }

                Mesh m = new Mesh();

                m.name = mfs[i].name;
                m.vertices = v;
                m.normals = n;
                m.triangles = mfs[i].sharedMesh.triangles;

                meshes[i] = m;
            }

            // Cleanup
            GameObject.DestroyImmediate(root);

            return meshes;
        }
    }
}
orrb
openai
C#
#pragma warning disable 0219
using UnityEngine;
using System.Globalization;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.IO;

namespace Parabox.STL
{
    /**
     * Import methods for STL files.
     */
    public static class pb_Stl_Importer
    {
        // Legacy Unity meshes hold at most 65535 (16-bit) vertices, and every
        // facet contributes 3 unshared vertices.
        const int MAX_FACETS_PER_MESH = 65535 / 3;

        // One STL triangle: a face normal and three vertices.
        class Facet
        {
            public Vector3 normal;
            public Vector3 a, b, c;

            public override string ToString()
            {
                return string.Format("{0:F2}: {1:F2}, {2:F2}, {3:F2}", normal, a, b, c);
            }
        }

        /**
         * Import an STL file at path. Auto-detects binary vs. ASCII format.
         * Returns null if a binary import throws.
         */
        public static List<Mesh> Import(string path)
        {
            if (IsBinary(path))
            {
                try
                {
                    return ImportBinary(path);
                }
                catch (System.Exception e)
                {
                    Logger.Error("pb_Stl_Importer::ImportBinary::Failed importing mesh at path {0} -- {1}",
                                 path, e.Message);
                    return null;
                }
            }
            else
            {
                return ImportAscii(path);
            }
        }

        private static List<Mesh> ImportBinary(string path)
        {
            using (FileStream fs = new FileStream(path, FileMode.Open, FileAccess.Read))
            {
                return ImportBinaryStream(fs);
            }
        }

        /**
         * Import a binary STL payload held in memory.
         */
        public static List<Mesh> ImportBytes(byte[] bytes)
        {
            using (MemoryStream ms = new MemoryStream(bytes))
            {
                return ImportBinaryStream(ms);
            }
        }

        // Parse a binary STL stream: 80-byte header, uint32 facet count, then
        // 50 bytes per facet (normal + 3 vertices + attribute count).
        private static List<Mesh> ImportBinaryStream(Stream stream)
        {
            Facet[] facets;

            using (BinaryReader br = new BinaryReader(stream, new ASCIIEncoding()))
            {
                br.ReadBytes(80); // skip the 80-byte header

                uint facetCount = br.ReadUInt32();
                facets = new Facet[facetCount];

                for (uint i = 0; i < facetCount; i++)
                    facets[i] = br.GetFacet();
            }

            return CreateMeshWithFacets(facets);
        }

        private static Facet GetFacet(this BinaryReader binaryReader)
        {
            Facet facet = new Facet();
            facet.normal = binaryReader.GetVector3();

            // maintain counter-clockwise orientation of vertices:
            facet.a = binaryReader.GetVector3();
            facet.b = binaryReader.GetVector3();
            facet.c = binaryReader.GetVector3();

            // Some exporters write zero normals; recompute from the vertices.
            if (facet.normal.magnitude < 0.01f)
            {
                facet.normal = CalculateNormal(facet.a, facet.b, facet.c);
            }

            binaryReader.ReadUInt16(); // padding (attribute byte count)

            return facet;
        }

        // Right-hand-rule face normal of triangle (a, b, c).
        private static Vector3 CalculateNormal(Vector3 a, Vector3 b, Vector3 c)
        {
            return Vector3.Cross(b - a, c - a).normalized;
        }

        private static Vector3 GetVector3(this BinaryReader binaryReader)
        {
            Vector3 vector3 = new Vector3();

            for (int i = 0; i < 3; i++)
                vector3[i] = binaryReader.ReadSingle();

            return vector3.UnityCoordTrafo();
        }

        // Hook for coordinate-system conversion; currently the identity.
        private static Vector3 UnityCoordTrafo(this Vector3 vector3)
        {
            return new Vector3(vector3.x, vector3.y, vector3.z);
        }

        // ASCII parser states, one per STL keyword.
        const int SOLID = 1;
        const int FACET = 2;
        const int OUTER = 3;
        const int VERTEX = 4;
        const int ENDLOOP = 5;
        const int ENDFACET = 6;
        const int ENDSOLID = 7;
        const int EMPTY = 0;

        private static int ReadState(string line)
        {
            if (line.StartsWith("solid"))
                return SOLID;
            else if (line.StartsWith("facet"))
                return FACET;
            else if (line.StartsWith("outer"))
                return OUTER;
            else if (line.StartsWith("vertex"))
                return VERTEX;
            else if (line.StartsWith("endloop"))
                return ENDLOOP;
            else if (line.StartsWith("endfacet"))
                return ENDFACET;
            else if (line.StartsWith("endsolid"))
                return ENDSOLID;
            else
                return EMPTY;
        }

        // Line-oriented state machine over the ASCII STL keywords.
        private static List<Mesh> ImportAscii(string path)
        {
            List<Facet> facets = new List<Facet>();

            using (StreamReader sr = new StreamReader(path))
            {
                string line;
                int state = EMPTY, vertex = 0;
                Facet f = null;
                bool exit = false;

                // Peek() returns -1 at end of stream; the previous `> 0` test
                // also terminated early on a NUL character.
                while (sr.Peek() >= 0 && !exit)
                {
                    line = sr.ReadLine().Trim();
                    int previousState = state;
                    state = ReadState(line);

                    switch (state)
                    {
                        case SOLID:
                            continue;

                        case FACET:
                            f = new Facet();
                            f.normal = StringToVec3(line.Replace("facet normal ", ""));
                            break;

                        case OUTER:
                            vertex = 0;
                            break;

                        case VERTEX:
                            // maintain counter-clockwise orientation of vertices:
                            if (vertex == 0) f.a = StringToVec3(line.Replace("vertex ", ""));
                            else if (vertex == 2) f.c = StringToVec3(line.Replace("vertex ", ""));
                            else if (vertex == 1) f.b = StringToVec3(line.Replace("vertex ", ""));
                            vertex++;
                            break;

                        case ENDLOOP:
                            break;

                        case ENDFACET:
                            facets.Add(f);
                            break;

                        case ENDSOLID:
                            exit = true;
                            break;

                        case EMPTY:
                        default:
                            break;
                    }
                }
            }

            return CreateMeshWithFacets(facets);
        }

        private static Vector3 StringToVec3(string str)
        {
            string[] split = str.Trim().Split(null);
            Vector3 v = new Vector3();

            // STL floats always use '.' as the decimal separator; parse with
            // the invariant culture so import works regardless of the OS locale
            // (culture-sensitive TryParse silently yields 0 / wrong values on
            // comma-decimal locales).
            float.TryParse(split[0], NumberStyles.Float, CultureInfo.InvariantCulture, out v.x);
            float.TryParse(split[1], NumberStyles.Float, CultureInfo.InvariantCulture, out v.y);
            float.TryParse(split[2], NumberStyles.Float, CultureInfo.InvariantCulture, out v.z);

            return v.UnityCoordTrafo();
        }

        /**
         * Read the first 80 bytes of a file and if they are all 0x0 it's likely
         * that this file is binary. Falls back to checking whether the file
         * starts with the ASCII keyword "solid ".
         */
        private static bool IsBinary(string path)
        {
            // http://stackoverflow.com/questions/968935/compare-binary-files-in-c-sharp
            FileInfo file = new FileInfo(path);

            if (file.Length < 130)
                return false;

            var isBinary = false;

            using (FileStream f0 = file.OpenRead())
            {
                using (BufferedStream bs0 = new BufferedStream(f0))
                {
                    for (long i = 0; i < 80; i++)
                    {
                        var readByte = bs0.ReadByte();
                        if (readByte == 0x0)
                        {
                            isBinary = true;
                            break;
                        }
                    }
                }
            }

            if (!isBinary)
            {
                using (FileStream f0 = file.OpenRead())
                {
                    using (BufferedStream bs0 = new BufferedStream(f0))
                    {
                        var byteArray = new byte[6];

                        for (var i = 0; i < 6; i++)
                        {
                            byteArray[i] = (byte)bs0.ReadByte();
                        }

                        var text = Encoding.UTF8.GetString(byteArray);
                        isBinary = text != "solid ";
                    }
                }
            }

            return isBinary;
        }

        // Number of Unity meshes needed to hold `facets` triangles (ceiling
        // division by MAX_FACETS_PER_MESH).
        private static int MeshesCount(int facets)
        {
            return (facets + MAX_FACETS_PER_MESH - 1) / MAX_FACETS_PER_MESH;
        }

        /**
         * Build Unity meshes from parsed facets, splitting into multiple
         * meshes when the 16-bit vertex limit would be exceeded. Vertex
         * normals are smoothed across facets whose face normals lie within
         * 60 degrees of each other; UVs are a spherical projection around
         * the facet centroid.
         *
         * @todo test with > USHORT_MAX vertex count meshes
         */
        private static List<Mesh> CreateMeshWithFacets(IList<Facet> facets)
        {
            List<Mesh> meshes = new List<Mesh>();

            // Guard: an empty facet list previously produced a 0/0 NaN center
            // (harmless but sloppy); just return no meshes.
            if (facets == null || facets.Count < 1)
                return meshes;

            int facet_count = facets.Count, current_facet = 0, full_mesh_vertices = MAX_FACETS_PER_MESH * 3;

            Vector3 center = Vector3.zero;
            foreach (Facet facet in facets)
            {
                center += facet.a + facet.b + facet.c;
            }
            center /= facets.Count * 3.0f;

            for (int i = 0; i < MeshesCount(facets.Count); i++)
            {
                int len = System.Math.Min(full_mesh_vertices, (facet_count - current_facet) * 3);
                Vector3[] v = new Vector3[len];
                Vector3[] n = new Vector3[len];
                Vector2[] uv = new Vector2[len];
                int[] t = new int[len];
                // Per-vertex-position collection of adjacent face normals,
                // used for smoothing in the second pass.
                Dictionary<Vector3, List<Vector3>> normals = new Dictionary<Vector3, List<Vector3>>();

                int facet_index = current_facet;
                for (int it = 0; it < len; it += 3)
                {
                    v[it] = facets[facet_index].a;
                    v[it + 1] = facets[facet_index].b;
                    v[it + 2] = facets[facet_index].c;

                    AddNormal(normals, facets[facet_index].a, facets[facet_index].normal.normalized);
                    AddNormal(normals, facets[facet_index].b, facets[facet_index].normal.normalized);
                    AddNormal(normals, facets[facet_index].c, facets[facet_index].normal.normalized);

                    t[it] = it;
                    t[it + 1] = it + 1;
                    t[it + 2] = it + 2;

                    uv[it] = GetUv(facets[facet_index].a - center);
                    uv[it + 1] = GetUv(facets[facet_index].b - center);
                    uv[it + 2] = GetUv(facets[facet_index].c - center);

                    facet_index++;
                }

                // Second pass: smooth each vertex normal over the facet
                // normals collected at that position.
                facet_index = current_facet;
                for (int it = 0; it < len; it += 3)
                {
                    n[it] = CalculateNormal(facets[facet_index].normal, normals[facets[facet_index].a]);
                    n[it + 1] = CalculateNormal(facets[facet_index].normal, normals[facets[facet_index].b]);
                    n[it + 2] = CalculateNormal(facets[facet_index].normal, normals[facets[facet_index].c]);
                    facet_index++;
                }
                current_facet = facet_index;

                Mesh mesh = new Mesh();
                mesh.vertices = v;
                mesh.normals = n;
                mesh.triangles = t;
                mesh.uv = uv;

                meshes.Add(mesh);
            }

            return meshes;
        }

        // Record `normal` as adjacent to the vertex position `vertex`.
        private static void AddNormal(Dictionary<Vector3, List<Vector3>> dictionary, Vector3 vertex, Vector3 normal)
        {
            if (dictionary.ContainsKey(vertex))
            {
                dictionary[vertex].Add(normal);
            }
            else
            {
                dictionary[vertex] = new List<Vector3>();
                dictionary[vertex].Add(normal);
            }
        }

        // Average the adjacent normals within 60 degrees of the face normal.
        // base_normal itself is always in normal_list, so sum_count >= 1.
        private static Vector3 CalculateNormal(Vector3 base_normal, List<Vector3> normal_list)
        {
            Vector3 sum = Vector3.zero;
            int sum_count = 0;

            foreach (Vector3 normal in normal_list)
            {
                if (Vector3.Angle(base_normal, normal) < 60.0)
                {
                    sum += normal;
                    sum_count++;
                }
            }

            return (sum / sum_count).normalized;
        }

        // Spherical UV projection of a direction from the mesh centroid.
        private static Vector2 GetUv(Vector3 dir)
        {
            return new Vector2(0.5f + Mathf.Atan2(dir.x, dir.y) / (Mathf.PI * 2.0f),
                               0.5f + Mathf.Acos(dir.normalized.z) / Mathf.PI);
        }
    }
}
orrb
openai
C#
using UnityEngine;
using UnityEditor;
using System.Linq;
using System.Collections;
using System.IO;
using System.Collections.Generic;

namespace Parabox.STL
{
    // Editor hook that converts imported .stl files into prefabs whose
    // sub-assets are the parsed meshes.
    public class pb_Stl_AssetPostProcessor : AssetPostprocessor
    {
        // For every imported .stl: parse it, then create (or rebuild) a
        // sibling prefab "<dir>/<name>.prefab" containing one child per
        // parsed mesh, each given the default diffuse material.
        // NOTE(review): CreatePrefab/ReplacePrefab are the pre-2018.3 prefab
        // API — confirm against the project's Unity version before upgrading.
        private static void OnPostprocessAllAssets(string[] importedAssets, string[] deletedAssets, string[] movedAssets, string[] movedFromPath)
        {
            foreach(string path in importedAssets.Where(x => x.ToLowerInvariant().EndsWith(".stl")))
            {
                string dir = Path.GetDirectoryName(path).Replace("\\", "/");
                string name = Path.GetFileNameWithoutExtension(path);

                IList<Mesh> meshes = pb_Stl_Importer.Import(path);

                if(meshes == null)
                    continue;

                // Grab the built-in default material off a throwaway primitive.
                GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
                Material defaultDiffuse = cube.GetComponent<MeshRenderer>().sharedMaterial;
                GameObject.DestroyImmediate(cube);

                string prefab_path = string.Format("{0}/{1}.prefab", dir, name);

#if UNITY_4_7
                GameObject prefab_source = (GameObject) AssetDatabase.LoadAssetAtPath(prefab_path, typeof(GameObject));
#else
                GameObject prefab_source = AssetDatabase.LoadAssetAtPath<GameObject>(prefab_path);
#endif

                // Create an empty prefab asset if one does not exist yet.
                GameObject prefab = new GameObject();
                prefab.name = name;

                if(prefab_source == null)
                    prefab_source = PrefabUtility.CreatePrefab(prefab_path, prefab);

                GameObject.DestroyImmediate(prefab);

                // Remove stale mesh sub-assets from a previous import before
                // attaching the freshly parsed ones.
                Object[] children = AssetDatabase.LoadAllAssetsAtPath(prefab_path);

                for(int i = 0; i < children.Length; i++)
                {
                    if(AssetDatabase.IsSubAsset(children[i]))
                        GameObject.DestroyImmediate(children[i], true);
                }

                for(int i = 0; i < meshes.Count; i++)
                    AssetDatabase.AddObjectToAsset(meshes[i], prefab_source);

                children = AssetDatabase.LoadAllAssetsAtPath(prefab_path);

                // Build a temporary hierarchy with one renderer per mesh and
                // push it into the prefab, then discard the scene copy.
                GameObject render = new GameObject();

                for(int i = 0; i < children.Length; i++)
                {
                    Mesh m = children[i] as Mesh;

                    if(m == null)
                        continue;

                    GameObject child = new GameObject();
                    child.name = string.Format("{0} ({1})", name, i);
                    m.name = child.name;

                    child.AddComponent<MeshFilter>().sharedMesh = m;
                    child.AddComponent<MeshRenderer>().sharedMaterial = defaultDiffuse;

                    child.transform.SetParent(render.transform, false);
                }

                PrefabUtility.ReplacePrefab(render, prefab_source, ReplacePrefabOptions.ReplaceNameBased);

                GameObject.DestroyImmediate(render);
            }
        }

        // Import an .stl file and save each parsed mesh as a standalone
        // "<dir>/<name><i>.asset" mesh asset (no prefab).
        public static void CreateMeshAssetWithPath(string path)
        {
            string dir = Path.GetDirectoryName(path).Replace("\\", "/");
            string name = Path.GetFileNameWithoutExtension(path);

            IList<Mesh> meshes = pb_Stl_Importer.Import(path);

            if(meshes == null)
                return;

            for(int i = 0; i < meshes.Count; i++)
                AssetDatabase.CreateAsset(meshes[i], string.Format("{0}/{1}{2}.asset", dir, name, i));
        }

        // Menu shortcut (Alt+D): re-import every selected asset as mesh assets.
        [MenuItem("Tools/Force Import &d")]
        static void ditos()
        {
            foreach(Object o in Selection.objects)
            {
                CreateMeshAssetWithPath(AssetDatabase.GetAssetPath(o));
            }
        }
    }
}
orrb
openai
C#
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Linq;

namespace Parabox.STL
{
    /**
     * Menu items for STL import/export.
     */
    public class pb_Stl_Menu : Editor
    {
        // Validation for both export menu items: enabled only when the
        // selection contains at least one MeshFilter with a mesh assigned.
        [MenuItem("Assets/Export Model/STL (Ascii)", true)]
        [MenuItem("Assets/Export Model/STL (Binary)", true)]
        static bool VerifyExport()
        {
            var filters = Selection.transforms.SelectMany(t => t.GetComponentsInChildren<MeshFilter>());
            return filters.Any(filter => filter.sharedMesh != null);
        }

        [MenuItem("Assets/Export Model/STL (Ascii)", false, 30)]
        static void MenuExportAscii()
        {
            ExportWithFileDialog(Selection.gameObjects, FileType.Ascii);
        }

        [MenuItem("Assets/Export Model/STL (Binary)", false, 30)]
        static void MenuExportBinary()
        {
            ExportWithFileDialog(Selection.gameObjects, FileType.Binary);
        }

        // Prompt for a destination, run the exporter, and — when the file
        // landed inside this project — ping it in the Project window and
        // refresh the asset database.
        private static void ExportWithFileDialog(GameObject[] gameObjects, FileType type)
        {
            if(gameObjects == null || gameObjects.Length < 1)
            {
                Debug.LogWarning("Attempting to export STL file with no GameObject selected. For reasons that should be obvious this is not allowed.");
                return;
            }

            string path = EditorUtility.SaveFilePanel("Save Mesh to STL", "", gameObjects.FirstOrDefault().name, "stl");

            if(!pb_Stl_Exporter.Export(path, gameObjects, type))
                return;

            string full = path.Replace("\\", "/");

            // if the file was saved in project, ping it
            if(!full.Contains(Application.dataPath))
                return;

            string relative = full.Replace(Application.dataPath, "Assets");

#if UNITY_4_7
            Object o = (Object) AssetDatabase.LoadAssetAtPath(relative, typeof(Object));
#else
            Object o = AssetDatabase.LoadAssetAtPath<Object>(relative);
#endif

            if(o != null)
                EditorGUIUtility.PingObject(o);

            AssetDatabase.Refresh();
        }
    }
}
orrb
openai
C#
#if !UNITY_4_7
using UnityEngine;
using UnityEditor;
using NUnit.Framework;
using System.Collections.Generic;
using Parabox.STL;
using System.IO;

/**
 * Editor tests for pb_Stl lib.
 */
public class pb_Stl_Tests
{
    // Scratch directory for files written by tests; deleted on completion.
    const string TEMP_FILE_DIR = "Assets/pb_Stl/Editor/Test/Temp";
    // Checked-in fixture models the tests compare against.
    const string TEST_MODELS = "Assets/pb_Stl/Editor/Test/Models/";

    // ASCII export of primitive meshes must match the checked-in fixtures
    // byte-for-byte (modulo line endings).
    [Test]
    public void VerifyWriteASCII()
    {
        DoVerifyWriteString(TEST_MODELS + "Cylinder_ASCII_RH.stl", GameObject.CreatePrimitive(PrimitiveType.Cylinder));
        DoVerifyWriteString(TEST_MODELS + "Sphere_ASCII_RH.stl", GameObject.CreatePrimitive(PrimitiveType.Sphere));
    }

    // Binary export of primitive meshes must match the checked-in fixtures.
    [Test]
    public void VerifyWriteBinary()
    {
        if (!Directory.Exists(TEMP_FILE_DIR))
            Directory.CreateDirectory(TEMP_FILE_DIR);
        DoVerifyWriteBinary(TEST_MODELS + "Cylinder_BINARY_RH.stl", GameObject.CreatePrimitive(PrimitiveType.Cylinder));
        DoVerifyWriteBinary(TEST_MODELS + "Sphere_BINARY_RH.stl", GameObject.CreatePrimitive(PrimitiveType.Sphere));
        Directory.Delete(TEMP_FILE_DIR, true);
    }

    // Round-trip: export two transformed cubes, re-import, and compare
    // against a re-imported fixture (geometric comparison, not byte compare).
    [Test]
    public void TestExportMultiple()
    {
        GameObject a = GameObject.CreatePrimitive(PrimitiveType.Cube);
        GameObject b = GameObject.CreatePrimitive(PrimitiveType.Cube);

        a.transform.position = Vector3.right;
        b.transform.position = new Vector3(3f, 5f, 2.4f);
        b.transform.localRotation = Quaternion.Euler(new Vector3(45f, 45f, 10f));

        if (!Directory.Exists(TEMP_FILE_DIR))
            Directory.CreateDirectory(TEMP_FILE_DIR);

        string temp_model_path = string.Format("{0}/multiple.stl", TEMP_FILE_DIR);

        pb_Stl_Exporter.Export(temp_model_path, new GameObject[] { a, b }, FileType.Binary);

        // Comparing binary files isn't great
        // Assert.IsTrue(CompareFiles(string.Format("{0}/CompositeCubes_BINARY.stl", TEST_MODELS), temp_model_path));

        IList<Mesh> expected = pb_Stl_Importer.Import(string.Format("{0}/CompositeCubes_BINARY.stl", TEST_MODELS));
        IList<Mesh> results = pb_Stl_Importer.Import(temp_model_path);

        Assert.IsTrue(expected != null);
        Assert.IsTrue(results != null);

        Assert.IsTrue(expected.Count == 1);
        Assert.IsTrue(results.Count == 1);

        Assert.AreEqual(expected[0].vertexCount, results[0].vertexCount);
        Assert.AreEqual(expected[0].triangles, results[0].triangles);

        // Can't use Assert.AreEqual(positions, normals, uvs) because Vec3 comparison is subject to floating point inaccuracy
        for (int i = 0; i < expected[0].vertexCount; i++)
        {
            Assert.Less(Vector3.Distance(expected[0].vertices[i], results[0].vertices[i]), .00001f);
            Assert.Less(Vector3.Distance(expected[0].normals[i], results[0].normals[i]), .00001f);
        }

        GameObject.DestroyImmediate(a);
        GameObject.DestroyImmediate(b);

        Directory.Delete(TEMP_FILE_DIR, true);
    }

    // Fixture cylinder: 80 facets -> 240 indices and 240 unshared vertices.
    [Test]
    public void TestImportAscii()
    {
        IList<Mesh> meshes = pb_Stl_Importer.Import(string.Format("{0}/Cylinder_ASCII_RH.stl", TEST_MODELS));
        Assert.IsTrue(meshes != null);
        Assert.AreEqual(1, meshes.Count);
        Assert.AreEqual(240, meshes[0].triangles.Length);
        Assert.AreEqual(240, meshes[0].vertexCount);
    }

    [Test]
    public void TestImportBinary()
    {
        IList<Mesh> meshes = pb_Stl_Importer.Import(string.Format("{0}/Cylinder_BINARY_RH.stl", TEST_MODELS));
        Assert.IsTrue(meshes != null);
        Assert.AreEqual(1, meshes.Count);
        Assert.AreEqual(240, meshes[0].triangles.Length);
        Assert.AreEqual(240, meshes[0].vertexCount);
    }

    // Binary file with a non-empty 80-byte header must still import.
    [Test]
    public void TestImportBinaryWithHeaders()
    {
        IList<Mesh> meshes = pb_Stl_Importer.Import(string.Format("{0}/CubedShape_BINARY_H.stl", TEST_MODELS));
        Assert.IsTrue(meshes != null);
        Assert.AreEqual(1, meshes.Count);
        Assert.AreEqual(204, meshes[0].triangles.Length);
        Assert.AreEqual(204, meshes[0].vertexCount);
    }

    // Write go's mesh as binary STL and byte-compare against the fixture.
    private void DoVerifyWriteBinary(string expected_path, GameObject go)
    {
        string temp_model_path = string.Format("{0}/binary_file.stl", TEMP_FILE_DIR);
        Assert.IsTrue(pb_Stl.WriteFile(temp_model_path, go.GetComponent<MeshFilter>().sharedMesh, FileType.Binary));
        Assert.IsTrue(CompareFiles(temp_model_path, expected_path));
        GameObject.DestroyImmediate(go);
    }

    // Write go's mesh as ASCII STL and string-compare against the fixture.
    private void DoVerifyWriteString(string path, GameObject go)
    {
        string ascii = pb_Stl.WriteString(go.GetComponent<MeshFilter>().sharedMesh, true);

        // Replace Windows line endings with Unix
        // @todo Does STL spec care about line endings?
        ascii = ascii.Replace("\r\n", "\n");

        Assert.AreNotEqual(ascii, null);
        Assert.AreNotEqual(ascii, "");

        string expected = File.ReadAllText(path);

        Assert.AreNotEqual(expected, null);
        Assert.AreNotEqual(expected, "");

        Assert.AreEqual(ascii, expected);

        GameObject.DestroyImmediate(go);
    }

    // Byte-by-byte file equality (length check first).
    private bool CompareFiles(string left, string right)
    {
        if (left == null || right == null)
            return false;

        // http://stackoverflow.com/questions/968935/compare-binary-files-in-c-sharp
        FileInfo a = new FileInfo(left);
        FileInfo b = new FileInfo(right);

        if (a.Length != b.Length)
            return false;

        using (FileStream f0 = a.OpenRead())
        using (FileStream f1 = b.OpenRead())
        using (BufferedStream bs0 = new BufferedStream(f0))
        using (BufferedStream bs1 = new BufferedStream(f1))
        {
            for (long i = 0; i < a.Length; i++)
            {
                if (bs0.ReadByte() != bs1.ReadByte())
                {
                    return false;
                }
            }
        }

        return true;
    }
}
#endif
orrb
openai
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Randomizes the scene cameras' pose and field of view. Three operation
// modes are supported:
//   'Jitter' - small random perturbations of the cached local position,
//              local rotation and field of view,
//   'Orbit'  - a random rotation of the camera around the vertical axis
//              through a fixed world-space point,
//   'Both'   - 'Jitter' followed by 'Orbit'.
//
// Configurable properties:
//   enum mode - 'Jitter', 'Orbit' or 'Both',
//   float position_radius - maximal position perturbation distance, in
//       meters, used in 'Jitter' mode,
//   float quat_radius - maximal rotation perturbation magnitude, in
//       radians, used in 'Jitter' mode,
//   float fov_radius - maximal field of view perturbation, in degrees,
//       used in 'Jitter' mode,
//   vector3 orbit_center - the point the camera is orbited around, used
//       in 'Orbit' mode.
public class CameraRandomizer : RendererComponent {

  public enum Mode {
    Jitter,
    Orbit,
    Both
  }

  [SerializeField]
  [ConfigProperty]
  public Mode mode_ = Mode.Jitter;

  [SerializeField]
  [ConfigProperty]
  public float position_radius_ = 0.02f;

  [SerializeField]
  [ConfigProperty]
  public float fov_radius_ = 1.0f;

  [SerializeField]
  [ConfigProperty]
  public float quat_radius_ = 0.03f;

  [SerializeField]
  [ConfigProperty]
  public Vector3 orbit_center_ = Vector3.zero;

  // Snapshot of one camera's pose and field of view, taken at
  // initialization time; jitter is always applied relative to this
  // baseline so perturbations do not accumulate between calls.
  private struct CameraState {
    public float fov;
    public Vector3 pos;
    public Quaternion rot;
    public Camera camera;
  };

  private List<CameraState> cached_states_ = new List<CameraState>();

  // Cache the original local pose and field of view of every child
  // camera, then apply the provided config.
  public override bool InitializeComponent(Orrb.RendererComponentConfig config) {
    foreach (Camera child_camera in GetComponentsInChildren<Camera>()) {
      CameraState state = new CameraState();
      state.camera = child_camera;
      state.fov = child_camera.fieldOfView;
      state.pos = child_camera.transform.localPosition;
      state.rot = child_camera.transform.localRotation;
      cached_states_.Add(state);
    }
    return UpdateComponent(config);
  }

  // Randomize every cached camera according to the configured mode.
  public override bool RunComponent(RendererComponent.IOutputContext context) {
    // Half-widths of the symmetric perturbation intervals.
    float half_fov = fov_radius_ / 2.0f;
    float half_pos = position_radius_ / 2.0f;
    float half_rot = quat_radius_ / 2.0f;

    bool do_jitter = mode_ == Mode.Jitter || mode_ == Mode.Both;
    bool do_orbit = mode_ == Mode.Orbit || mode_ == Mode.Both;

    if (do_jitter) {
      // Perturb around the cached baseline state, not the current
      // transform.
      foreach (CameraState state in cached_states_) {
        state.camera.fieldOfView = state.fov + Random.Range(-half_fov, half_fov);
        float dx = Random.Range(-half_pos, half_pos);
        float dy = Random.Range(-half_pos, half_pos);
        float dz = Random.Range(-half_pos, half_pos);
        state.camera.transform.localPosition = state.pos + new Vector3(dx, dy, dz);
        // Pick a uniformly distributed random axis, then rotate around it
        // by a small random angle (quat_radius is expressed in radians).
        Vector3 random_axis = Random.rotationUniform * Vector3.up;
        float angle_degrees = Random.Range(-half_rot, half_rot) * Mathf.Rad2Deg;
        state.camera.transform.localRotation =
            state.rot * Quaternion.AngleAxis(angle_degrees, random_axis);
      }
    }

    if (do_orbit) {
      // Spin the camera around the vertical line going straight up from
      // the 'orbit_center' point.
      foreach (CameraState state in cached_states_) {
        state.camera.transform.RotateAround(orbit_center_, Vector3.up,
                                            Random.Range(0.0f, 360.0f));
      }
    }
    return true;
  }

  public override void DrawEditorGUI() {
    GUILayout.BeginVertical();
    RendererComponent.GUISlider("position_radius", ref position_radius_, 0.0f, 1.0f);
    RendererComponent.GUISlider("fov_radius", ref fov_radius_, 0.0f, 20.0f);
    RendererComponent.GUISlider("quat_radius", ref quat_radius_, 0.0f, 1.0f);
    RendererComponent.GUIVector3("orbit_center", ref orbit_center_);
    GUILayout.EndVertical();
  }
}
113