repo_name (string) | repo_creator (string) | programming_language (string) | code (string) | num_lines (int64)
---|---|---|---|---|
ml-agents | openai | C# | using UnityEngine;
namespace MLAgents
{
/// <summary>
/// This class contains logic for locomotion agents whose joints may make contact with the ground.
/// By attaching this as a component to those joints, their contact with the ground can be used as
/// an observation for that agent and/or as a means of penalizing the agent for undesirable contact.
/// </summary>
[DisallowMultipleComponent]
public class GroundContact : MonoBehaviour
{
[HideInInspector] public Agent agent;
[Header("Ground Check")] public bool agentDoneOnGroundContact; // Whether to reset agent on ground contact.
public bool penalizeGroundContact; // Whether to penalize on contact.
public float groundContactPenalty; // Penalty amount (ex: -1).
public bool touchingGround;
private const string k_Ground = "ground"; // Tag of ground object.
/// <summary>
/// Check for collision with ground, and optionally penalize agent.
/// </summary>
void OnCollisionEnter(Collision col)
{
if (col.transform.CompareTag(k_Ground))
{
touchingGround = true;
if (penalizeGroundContact)
{
agent.SetReward(groundContactPenalty);
}
if (agentDoneOnGroundContact)
{
agent.Done();
}
}
}
/// <summary>
/// Check for end of ground collision and reset flag appropriately.
/// </summary>
void OnCollisionExit(Collision other)
{
if (other.transform.CompareTag(k_Ground))
{
touchingGround = false;
}
}
}
}
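// Illustrative sketch (not part of the original file): after JointDriveController.SetupBodyPart
// has attached this component to a body part, a hypothetical setup script could enable the
// optional penalty and reset behaviour via the public fields declared above, e.g.
//     var contact = foot.GetComponent<GroundContact>();   // 'foot' is an assumed Transform
//     contact.penalizeGroundContact = true;
//     contact.groundContactPenalty = -1f;
//     contact.agentDoneOnGroundContact = true;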
| 53 |
ml-agents | openai | C# | using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;
namespace MLAgents
{
/// <summary>
/// Used to store relevant information for acting and learning for each body part in agent.
/// </summary>
[System.Serializable]
public class BodyPart
{
[Header("Body Part Info")][Space(10)] public ConfigurableJoint joint;
public Rigidbody rb;
[HideInInspector] public Vector3 startingPos;
[HideInInspector] public Quaternion startingRot;
[Header("Ground & Target Contact")][Space(10)]
public GroundContact groundContact;
public TargetContact targetContact;
[FormerlySerializedAs("thisJDController")]
[HideInInspector] public JointDriveController thisJdController;
[Header("Current Joint Settings")][Space(10)]
public Vector3 currentEularJointRotation;
[HideInInspector] public float currentStrength;
public float currentXNormalizedRot;
public float currentYNormalizedRot;
public float currentZNormalizedRot;
[Header("Other Debug Info")][Space(10)]
public Vector3 currentJointForce;
public float currentJointForceSqrMag;
public Vector3 currentJointTorque;
public float currentJointTorqueSqrMag;
public AnimationCurve jointForceCurve = new AnimationCurve();
public AnimationCurve jointTorqueCurve = new AnimationCurve();
/// <summary>
/// Reset body part to initial configuration.
/// </summary>
public void Reset(BodyPart bp)
{
bp.rb.transform.position = bp.startingPos;
bp.rb.transform.rotation = bp.startingRot;
bp.rb.velocity = Vector3.zero;
bp.rb.angularVelocity = Vector3.zero;
if (bp.groundContact)
{
bp.groundContact.touchingGround = false;
}
if (bp.targetContact)
{
bp.targetContact.touchingTarget = false;
}
}
/// <summary>
/// Set the joint's target rotation from the normalized goal angles `x, y, z` (each in [-1, 1]).
/// </summary>
public void SetJointTargetRotation(float x, float y, float z)
{
x = (x + 1f) * 0.5f;
y = (y + 1f) * 0.5f;
z = (z + 1f) * 0.5f;
var xRot = Mathf.Lerp(joint.lowAngularXLimit.limit, joint.highAngularXLimit.limit, x);
var yRot = Mathf.Lerp(-joint.angularYLimit.limit, joint.angularYLimit.limit, y);
var zRot = Mathf.Lerp(-joint.angularZLimit.limit, joint.angularZLimit.limit, z);
currentXNormalizedRot =
Mathf.InverseLerp(joint.lowAngularXLimit.limit, joint.highAngularXLimit.limit, xRot);
currentYNormalizedRot = Mathf.InverseLerp(-joint.angularYLimit.limit, joint.angularYLimit.limit, yRot);
currentZNormalizedRot = Mathf.InverseLerp(-joint.angularZLimit.limit, joint.angularZLimit.limit, zRot);
joint.targetRotation = Quaternion.Euler(xRot, yRot, zRot);
currentEularJointRotation = new Vector3(xRot, yRot, zRot);
}
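// Worked example (illustrative, not part of the original file): with X limits of
// lowAngularXLimit.limit = -60 and highAngularXLimit.limit = 60 degrees, an action x = -1
// rescales to 0 and Mathf.Lerp(-60, 60, 0) gives a -60 degree target, x = 0 rescales to 0.5
// and gives 0 degrees, and x = 1 gives +60 degrees.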
public void SetJointStrength(float strength)
{
var rawVal = (strength + 1f) * 0.5f * thisJdController.maxJointForceLimit;
var jd = new JointDrive
{
positionSpring = thisJdController.maxJointSpring,
positionDamper = thisJdController.jointDampen,
maximumForce = rawVal
};
joint.slerpDrive = jd;
currentStrength = jd.maximumForce;
}
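// Worked example (illustrative, not part of the original file): a strength action of -1 maps
// to a maximumForce of 0, 0 maps to half of thisJdController.maxJointForceLimit, and +1 maps
// to the full limit.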
}
public class JointDriveController : MonoBehaviour
{
[Header("Joint Drive Settings")][Space(10)]
public float maxJointSpring;
public float jointDampen;
public float maxJointForceLimit;
float m_FacingDot;
[HideInInspector] public Dictionary<Transform, BodyPart> bodyPartsDict = new Dictionary<Transform, BodyPart>();
[HideInInspector] public List<BodyPart> bodyPartsList = new List<BodyPart>();
/// <summary>
/// Create BodyPart object and add it to dictionary.
/// </summary>
public void SetupBodyPart(Transform t)
{
var bp = new BodyPart
{
rb = t.GetComponent<Rigidbody>(),
joint = t.GetComponent<ConfigurableJoint>(),
startingPos = t.position,
startingRot = t.rotation
};
bp.rb.maxAngularVelocity = 100;
// Add & setup the ground contact script
bp.groundContact = t.GetComponent<GroundContact>();
if (!bp.groundContact)
{
bp.groundContact = t.gameObject.AddComponent<GroundContact>();
bp.groundContact.agent = gameObject.GetComponent<Agent>();
}
else
{
bp.groundContact.agent = gameObject.GetComponent<Agent>();
}
// Add & setup the target contact script
bp.targetContact = t.GetComponent<TargetContact>();
if (!bp.targetContact)
{
bp.targetContact = t.gameObject.AddComponent<TargetContact>();
}
bp.thisJdController = this;
bodyPartsDict.Add(t, bp);
bodyPartsList.Add(bp);
}
public void GetCurrentJointForces()
{
foreach (var bodyPart in bodyPartsDict.Values)
{
if (bodyPart.joint)
{
bodyPart.currentJointForce = bodyPart.joint.currentForce;
// Note: despite the "SqrMag" suffix, the two fields below store the plain (non-squared) magnitude.
bodyPart.currentJointForceSqrMag = bodyPart.joint.currentForce.magnitude;
bodyPart.currentJointTorque = bodyPart.joint.currentTorque;
bodyPart.currentJointTorqueSqrMag = bodyPart.joint.currentTorque.magnitude;
if (Application.isEditor)
{
if (bodyPart.jointForceCurve.length > 1000)
{
bodyPart.jointForceCurve = new AnimationCurve();
}
if (bodyPart.jointTorqueCurve.length > 1000)
{
bodyPart.jointTorqueCurve = new AnimationCurve();
}
bodyPart.jointForceCurve.AddKey(Time.time, bodyPart.currentJointForceSqrMag);
bodyPart.jointTorqueCurve.AddKey(Time.time, bodyPart.currentJointTorqueSqrMag);
}
}
}
}
}
}
| 180 |
ml-agents | openai | C# | using System.Collections.Generic;
using UnityEngine;
namespace MLAgents
{
public class RandomDecision : Decision
{
public override float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
if (brainParameters.vectorActionSpaceType == SpaceType.Continuous)
{
var act = new List<float>();
for (var i = 0; i < brainParameters.vectorActionSize[0]; i++)
{
act.Add(2 * Random.value - 1);
}
return act.ToArray();
}
else
{
var act = new float[brainParameters.vectorActionSize.Length];
for (var i = 0; i < brainParameters.vectorActionSize.Length; i++)
{
act[i] = Random.Range(0, brainParameters.vectorActionSize[i]);
}
return act;
}
}
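// Illustrative note (not part of the original file): for a discrete action space with
// vectorActionSize = {3, 2}, the branch above returns two floats, the first drawn uniformly
// from {0, 1, 2} and the second from {0, 1}; for a continuous space with
// vectorActionSize[0] = 4, it returns four floats sampled uniformly from [-1, 1].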
public override List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
return new List<float>();
}
}
}
| 48 |
ml-agents | openai | C# | using System.Collections.Generic;
using UnityEngine;
public abstract class RayPerception : MonoBehaviour
{
protected List<float> m_PerceptionBuffer = new List<float>();
public virtual List<float> Perceive(float rayDistance,
float[] rayAngles, string[] detectableObjects,
float startOffset, float endOffset)
{
return m_PerceptionBuffer;
}
/// <summary>
/// Converts degrees to radians.
/// </summary>
public static float DegreeToRadian(float degree)
{
return degree * Mathf.PI / 180f;
}
}
| 23 |
ml-agents | openai | C# | using System.Collections.Generic;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Ray 2D perception component. Attach this to agents to enable "local perception"
/// via the use of ray casts directed outward from the agent.
/// </summary>
public class RayPerception2D : RayPerception
{
Vector2 m_EndPosition;
RaycastHit2D m_Hit;
/// <summary>
/// Creates perception vector to be used as part of an observation of an agent.
/// Each ray in the rayAngles array adds a sublist of data to the observation.
/// The sublist contains the observation data for a single ray. The list is composed of the following:
/// 1. A one-hot encoding for detectable objects. For example, if detectableObjects.Length = n, the
/// first n elements of the sublist will be a one-hot encoding of the detectableObject that was hit, or
/// all zeroes otherwise.
/// 2. The 'length' element of the sublist will be 1 if the ray missed everything, or 0 if it hit
/// something (detectable or not).
/// 3. The 'length+1' element of the sublist will contain the normalised distance to the object hit.
/// NOTE: Only objects with tags in the detectableObjects array will have a distance set.
/// </summary>
/// <returns>The partial vector observation corresponding to the set of rays</returns>
/// <param name="rayDistance">Radius of rays</param>
/// <param name="rayAngles">Angles of rays (starting from (1,0) on unit circle).</param>
/// <param name="detectableObjects">List of tags which correspond to object types agent can see</param>
public List<float> Perceive(float rayDistance,
float[] rayAngles, string[] detectableObjects)
{
m_PerceptionBuffer.Clear();
// For each ray sublist stores categorical information on detected object
// along with object distance.
foreach (var angle in rayAngles)
{
m_EndPosition = transform.TransformDirection(
PolarToCartesian(rayDistance, angle));
if (Application.isEditor)
{
Debug.DrawRay(transform.position,
m_EndPosition, Color.black, 0.01f, true);
}
var subList = new float[detectableObjects.Length + 2];
m_Hit = Physics2D.CircleCast(transform.position, 0.5f, m_EndPosition, rayDistance);
if (m_Hit)
{
for (var i = 0; i < detectableObjects.Length; i++)
{
if (m_Hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
subList[i] = 1;
subList[detectableObjects.Length + 1] = m_Hit.distance / rayDistance;
break;
}
}
}
else
{
subList[detectableObjects.Length] = 1f;
}
m_PerceptionBuffer.AddRange(subList);
}
return m_PerceptionBuffer;
}
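// Worked example (illustrative, not part of the original file): with
// detectableObjects = { "wall", "goal" } and rayDistance = 20, a ray hitting a collider
// tagged "goal" at distance 10 appends { 0, 1, 0, 0.5 } -- a one-hot for "goal", 0 for the
// miss flag, and 0.5 for the normalised distance -- while a ray that hits nothing appends
// { 0, 0, 1, 0 }.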
/// <summary>
/// Converts polar coordinate to cartesian coordinate.
/// </summary>
public static Vector2 PolarToCartesian(float radius, float angle)
{
var x = radius * Mathf.Cos(DegreeToRadian(angle));
var y = radius * Mathf.Sin(DegreeToRadian(angle));
return new Vector2(x, y);
}
}
}
| 83 |
ml-agents | openai | C# | using System;
using System.Collections.Generic;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Ray perception component. Attach this to agents to enable "local perception"
/// via the use of ray casts directed outward from the agent.
/// </summary>
public class RayPerception3D : RayPerception
{
Vector3 m_EndPosition;
RaycastHit m_Hit;
private float[] m_SubList;
/// <summary>
/// Creates perception vector to be used as part of an observation of an agent.
/// Each ray in the rayAngles array adds a sublist of data to the observation.
/// The sublist contains the observation data for a single ray. The list is composed of the following:
/// 1. A one-hot encoding for detectable objects. For example, if detectableObjects.Length = n, the
/// first n elements of the sublist will be a one-hot encoding of the detectableObject that was hit, or
/// all zeroes otherwise.
/// 2. The 'length' element of the sublist will be 1 if the ray missed everything, or 0 if it hit
/// something (detectable or not).
/// 3. The 'length+1' element of the sublist will contain the normalised distance to the object hit.
/// NOTE: Only objects with tags in the detectableObjects array will have a distance set.
/// </summary>
/// <returns>The partial vector observation corresponding to the set of rays</returns>
/// <param name="rayDistance">Radius of rays</param>
/// <param name="rayAngles">Angles of rays (starting from (1,0) on unit circle).</param>
/// <param name="detectableObjects">List of tags which correspond to object types agent can see</param>
/// <param name="startOffset">Starting height offset of ray from center of agent.</param>
/// <param name="endOffset">Ending height offset of ray from center of agent.</param>
public override List<float> Perceive(float rayDistance,
float[] rayAngles, string[] detectableObjects,
float startOffset, float endOffset)
{
if (m_SubList == null || m_SubList.Length != detectableObjects.Length + 2)
m_SubList = new float[detectableObjects.Length + 2];
m_PerceptionBuffer.Clear();
m_PerceptionBuffer.Capacity = m_SubList.Length * rayAngles.Length;
// For each ray sublist stores categorical information on detected object
// along with object distance.
foreach (var angle in rayAngles)
{
m_EndPosition = transform.TransformDirection(
PolarToCartesian(rayDistance, angle));
m_EndPosition.y = endOffset;
if (Application.isEditor)
{
Debug.DrawRay(transform.position + new Vector3(0f, startOffset, 0f),
m_EndPosition, Color.black, 0.01f, true);
}
Array.Clear(m_SubList, 0, m_SubList.Length);
if (Physics.SphereCast(transform.position +
new Vector3(0f, startOffset, 0f), 0.5f,
m_EndPosition, out m_Hit, rayDistance))
{
for (var i = 0; i < detectableObjects.Length; i++)
{
if (m_Hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
m_SubList[i] = 1;
m_SubList[detectableObjects.Length + 1] = m_Hit.distance / rayDistance;
break;
}
}
}
else
{
m_SubList[detectableObjects.Length] = 1f;
}
Utilities.AddRangeNoAlloc(m_PerceptionBuffer, m_SubList);
}
return m_PerceptionBuffer;
}
/// <summary>
/// Converts polar coordinate to cartesian coordinate.
/// </summary>
public static Vector3 PolarToCartesian(float radius, float angle)
{
var x = radius * Mathf.Cos(DegreeToRadian(angle));
var z = radius * Mathf.Sin(DegreeToRadian(angle));
return new Vector3(x, 0f, z);
}
}
}
| 96 |
ml-agents | openai | C# | using UnityEngine;
namespace MLAgents
{
/// <summary>
/// This class contains logic for locomotion agents with joints which might make contact with a target.
/// By attaching this as a component to those joints, their contact with the target can be used as
/// an observation for that agent.
/// </summary>
[DisallowMultipleComponent]
public class TargetContact : MonoBehaviour
{
[Header("Detect Targets")] public bool touchingTarget;
private const string k_Target = "target"; // Tag on target object.
/// <summary>
/// Check for collision with a target.
/// </summary>
void OnCollisionEnter(Collision col)
{
if (col.transform.CompareTag(k_Target))
{
touchingTarget = true;
}
}
/// <summary>
/// Check for end of target collision and reset flag appropriately.
/// </summary>
void OnCollisionExit(Collision other)
{
if (other.transform.CompareTag(k_Target))
{
touchingTarget = false;
}
}
}
}
| 39 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class AgentSoccer : Agent
{
public enum Team
{
Purple,
Blue
}
public enum AgentRole
{
Striker,
Goalie
}
public Team team;
public AgentRole agentRole;
float m_KickPower;
int m_PlayerIndex;
public SoccerFieldArea area;
[HideInInspector]
public Rigidbody agentRb;
SoccerAcademy m_Academy;
Renderer m_AgentRenderer;
RayPerception m_RayPer;
float[] m_RayAngles = { 0f, 45f, 90f, 135f, 180f, 110f, 70f };
string[] m_DetectableObjectsPurple = { "ball", "purpleGoal", "blueGoal",
"wall", "purpleAgent", "blueAgent" };
string[] m_DetectableObjectsBlue = { "ball", "blueGoal", "purpleGoal",
"wall", "blueAgent", "purpleAgent" };
public void ChooseRandomTeam()
{
team = (Team)Random.Range(0, 2);
if (team == Team.Purple)
{
JoinPurpleTeam(agentRole);
}
else
{
JoinBlueTeam(agentRole);
}
}
public void JoinPurpleTeam(AgentRole role)
{
agentRole = role;
team = Team.Purple;
m_AgentRenderer.material = m_Academy.purpleMaterial;
tag = "purpleAgent";
}
public void JoinBlueTeam(AgentRole role)
{
agentRole = role;
team = Team.Blue;
m_AgentRenderer.material = m_Academy.blueMaterial;
tag = "blueAgent";
}
public override void InitializeAgent()
{
base.InitializeAgent();
m_AgentRenderer = GetComponentInChildren<Renderer>();
m_RayPer = GetComponent<RayPerception>();
m_Academy = FindObjectOfType<SoccerAcademy>();
agentRb = GetComponent<Rigidbody>();
agentRb.maxAngularVelocity = 500;
var playerState = new PlayerState
{
agentRb = agentRb,
startingPos = transform.position,
agentScript = this,
};
area.playerStates.Add(playerState);
m_PlayerIndex = area.playerStates.IndexOf(playerState);
playerState.playerIndex = m_PlayerIndex;
}
public override void CollectObservations()
{
var rayDistance = 20f;
string[] detectableObjects;
if (team == Team.Purple)
{
detectableObjects = m_DetectableObjectsPurple;
}
else
{
detectableObjects = m_DetectableObjectsBlue;
}
AddVectorObs(m_RayPer.Perceive(rayDistance, m_RayAngles, detectableObjects, 0f, 0f));
AddVectorObs(m_RayPer.Perceive(rayDistance, m_RayAngles, detectableObjects, 1f, 0f));
}
public void MoveAgent(float[] act)
{
var dirToGo = Vector3.zero;
var rotateDir = Vector3.zero;
var action = Mathf.FloorToInt(act[0]);
// Goalies and Strikers have slightly different action spaces.
if (agentRole == AgentRole.Goalie)
{
m_KickPower = 0f;
switch (action)
{
case 1:
dirToGo = transform.forward * 1f;
m_KickPower = 1f;
break;
case 2:
dirToGo = transform.forward * -1f;
break;
case 4:
dirToGo = transform.right * -1f;
break;
case 3:
dirToGo = transform.right * 1f;
break;
}
}
else
{
m_KickPower = 0f;
switch (action)
{
case 1:
dirToGo = transform.forward * 1f;
m_KickPower = 1f;
break;
case 2:
dirToGo = transform.forward * -1f;
break;
case 3:
rotateDir = transform.up * 1f;
break;
case 4:
rotateDir = transform.up * -1f;
break;
case 5:
dirToGo = transform.right * -0.75f;
break;
case 6:
dirToGo = transform.right * 0.75f;
break;
}
}
transform.Rotate(rotateDir, Time.deltaTime * 100f);
agentRb.AddForce(dirToGo * m_Academy.agentRunSpeed,
ForceMode.VelocityChange);
}
public override void AgentAction(float[] vectorAction, string textAction)
{
// Existential penalty for strikers.
if (agentRole == AgentRole.Striker)
{
AddReward(-1f / 3000f);
}
// Existential bonus for goalies.
if (agentRole == AgentRole.Goalie)
{
AddReward(1f / 3000f);
}
MoveAgent(vectorAction);
}
/// <summary>
/// Used to provide a "kick" to the ball.
/// </summary>
void OnCollisionEnter(Collision c)
{
var force = 2000f * m_KickPower;
if (c.gameObject.CompareTag("ball"))
{
var dir = c.contacts[0].point - transform.position;
dir = dir.normalized;
c.gameObject.GetComponent<Rigidbody>().AddForce(dir * force);
}
}
public override void AgentReset()
{
if (m_Academy.randomizePlayersTeamForTraining)
{
ChooseRandomTeam();
}
if (team == Team.Purple)
{
JoinPurpleTeam(agentRole);
transform.rotation = Quaternion.Euler(0f, -90f, 0f);
}
else
{
JoinBlueTeam(agentRole);
transform.rotation = Quaternion.Euler(0f, 90f, 0f);
}
transform.position = area.GetRandomSpawnPos(agentRole, team);
agentRb.velocity = Vector3.zero;
agentRb.angularVelocity = Vector3.zero;
SetResetParameters();
}
public void SetResetParameters()
{
area.ResetBall();
}
}
| 216 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class SoccerAcademy : Academy
{
public Material purpleMaterial;
public Material blueMaterial;
public float gravityMultiplier = 1;
public bool randomizePlayersTeamForTraining = true;
public float agentRunSpeed;
public float strikerPunish; //if the opposing team scores, the striker gets this negative reward (e.g. -1)
public float strikerReward; //if the team scores a goal, the striker gets this reward (e.g. +1)
public float goaliePunish; //if the opposing team scores, the goalie gets this negative reward (e.g. -1)
public float goalieReward; //if the team scores, the goalie gets this reward (currently 0; can be tuned later)
void Start()
{
Physics.gravity *= gravityMultiplier; //for soccer a multiplier of 3 looks good
}
public override void AcademyReset()
{
Physics.gravity = new Vector3(0, -resetParameters["gravity"], 0);
}
public override void AcademyStep()
{
}
}
| 32 |
ml-agents | openai | C# | using UnityEngine;
public class SoccerBallController : MonoBehaviour
{
[HideInInspector]
public SoccerFieldArea area;
public AgentSoccer lastTouchedBy; //who was the last to touch the ball
public string agentTag; //will be used to check if collided with an agent
public string purpleGoalTag; //will be used to check if collided with the purple goal
public string blueGoalTag; //will be used to check if collided with the blue goal
void OnCollisionEnter(Collision col)
{
if (col.gameObject.CompareTag(purpleGoalTag)) //ball touched the purple goal
{
area.GoalTouched(AgentSoccer.Team.Blue);
}
if (col.gameObject.CompareTag(blueGoalTag)) //ball touched blue goal
{
area.GoalTouched(AgentSoccer.Team.Purple);
}
}
}
| 24 |
ml-agents | openai | C# | using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;
[System.Serializable]
public class PlayerState
{
public int playerIndex;
[FormerlySerializedAs("agentRB")]
public Rigidbody agentRb;
public Vector3 startingPos;
public AgentSoccer agentScript;
public float ballPosReward;
}
public class SoccerFieldArea : MonoBehaviour
{
public GameObject ball;
[FormerlySerializedAs("ballRB")]
[HideInInspector]
public Rigidbody ballRb;
public GameObject ground;
public GameObject centerPitch;
SoccerBallController m_BallController;
public List<PlayerState> playerStates = new List<PlayerState>();
[HideInInspector]
public Vector3 ballStartingPos;
public GameObject goalTextUI;
[HideInInspector]
public bool canResetBall;
Material m_GroundMaterial;
Renderer m_GroundRenderer;
SoccerAcademy m_Academy;
public IEnumerator GoalScoredSwapGroundMaterial(Material mat, float time)
{
m_GroundRenderer.material = mat;
yield return new WaitForSeconds(time);
m_GroundRenderer.material = m_GroundMaterial;
}
void Awake()
{
m_Academy = FindObjectOfType<SoccerAcademy>();
m_GroundRenderer = centerPitch.GetComponent<Renderer>();
m_GroundMaterial = m_GroundRenderer.material;
canResetBall = true;
if (goalTextUI) { goalTextUI.SetActive(false); }
ballRb = ball.GetComponent<Rigidbody>();
m_BallController = ball.GetComponent<SoccerBallController>();
m_BallController.area = this;
ballStartingPos = ball.transform.position;
}
IEnumerator ShowGoalUI()
{
if (goalTextUI) goalTextUI.SetActive(true);
yield return new WaitForSeconds(.25f);
if (goalTextUI) goalTextUI.SetActive(false);
}
public void AllPlayersDone(float reward)
{
foreach (var ps in playerStates)
{
if (ps.agentScript.gameObject.activeInHierarchy)
{
if (reward != 0)
{
ps.agentScript.AddReward(reward);
}
ps.agentScript.Done();
}
}
}
public void GoalTouched(AgentSoccer.Team scoredTeam)
{
foreach (var ps in playerStates)
{
if (ps.agentScript.team == scoredTeam)
{
RewardOrPunishPlayer(ps, m_Academy.strikerReward, m_Academy.goalieReward);
}
else
{
RewardOrPunishPlayer(ps, m_Academy.strikerPunish, m_Academy.goaliePunish);
}
if (m_Academy.randomizePlayersTeamForTraining)
{
ps.agentScript.ChooseRandomTeam();
}
if (scoredTeam == AgentSoccer.Team.Purple)
{
StartCoroutine(GoalScoredSwapGroundMaterial(m_Academy.purpleMaterial, 1));
}
else
{
StartCoroutine(GoalScoredSwapGroundMaterial(m_Academy.blueMaterial, 1));
}
if (goalTextUI)
{
StartCoroutine(ShowGoalUI());
}
}
}
public void RewardOrPunishPlayer(PlayerState ps, float striker, float goalie)
{
if (ps.agentScript.agentRole == AgentSoccer.AgentRole.Striker)
{
ps.agentScript.AddReward(striker);
}
if (ps.agentScript.agentRole == AgentSoccer.AgentRole.Goalie)
{
ps.agentScript.AddReward(goalie);
}
ps.agentScript.Done(); //all agents need to be reset
}
public Vector3 GetRandomSpawnPos(AgentSoccer.AgentRole role, AgentSoccer.Team team)
{
var xOffset = 0f;
if (role == AgentSoccer.AgentRole.Goalie)
{
xOffset = 13f;
}
if (role == AgentSoccer.AgentRole.Striker)
{
xOffset = 7f;
}
if (team == AgentSoccer.Team.Blue)
{
xOffset = xOffset * -1f;
}
var randomSpawnPos = ground.transform.position +
new Vector3(xOffset, 0f, 0f)
+ (Random.insideUnitSphere * 2);
randomSpawnPos.y = ground.transform.position.y + 2;
return randomSpawnPos;
}
public Vector3 GetBallSpawnPosition()
{
var randomSpawnPos = ground.transform.position +
new Vector3(0f, 0f, 0f)
+ (Random.insideUnitSphere * 2);
randomSpawnPos.y = ground.transform.position.y + 2;
return randomSpawnPos;
}
public void ResetBall()
{
ball.transform.position = GetBallSpawnPosition();
ballRb.velocity = Vector3.zero;
ballRb.angularVelocity = Vector3.zero;
var ballScale = m_Academy.resetParameters["ball_scale"];
ballRb.transform.localScale = new Vector3(ballScale, ballScale, ballScale);
}
}
| 164 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class TemplateAcademy : Academy
{
public override void AcademyReset()
{
}
public override void AcademyStep()
{
}
}
| 14 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class TemplateAgent : Agent
{
public override void CollectObservations()
{
}
public override void AgentAction(float[] vectorAction, string textAction)
{
}
public override void AgentReset()
{
}
public override void AgentOnDone()
{
}
}
| 22 |
ml-agents | openai | C# | using System.Collections.Generic;
using UnityEngine;
using MLAgents;
public class TemplateDecision : Decision
{
public override float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
return new float[0];
}
public override List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
return new List<float>();
}
}
| 27 |
ml-agents | openai | C# | using UnityEngine;
public class HitWall : MonoBehaviour
{
public GameObject areaObject;
public int lastAgentHit;
private TennisArea m_Area;
private TennisAgent m_AgentA;
private TennisAgent m_AgentB;
// Use this for initialization
void Start()
{
m_Area = areaObject.GetComponent<TennisArea>();
m_AgentA = m_Area.agentA.GetComponent<TennisAgent>();
m_AgentB = m_Area.agentB.GetComponent<TennisAgent>();
}
private void OnTriggerExit(Collider other)
{
if (other.name == "over")
{
if (lastAgentHit == 0)
{
m_AgentA.AddReward(0.1f);
}
else
{
m_AgentB.AddReward(0.1f);
}
lastAgentHit = 0;
}
}
private void OnCollisionEnter(Collision collision)
{
if (collision.gameObject.CompareTag("iWall"))
{
if (collision.gameObject.name == "wallA")
{
if (lastAgentHit == 0)
{
m_AgentA.AddReward(-0.01f);
m_AgentB.SetReward(0);
m_AgentB.score += 1;
}
else
{
m_AgentA.SetReward(0);
m_AgentB.AddReward(-0.01f);
m_AgentA.score += 1;
}
}
else if (collision.gameObject.name == "wallB")
{
if (lastAgentHit == 0)
{
m_AgentA.AddReward(-0.01f);
m_AgentB.SetReward(0);
m_AgentB.score += 1;
}
else
{
m_AgentA.SetReward(0);
m_AgentB.AddReward(-0.01f);
m_AgentA.score += 1;
}
}
else if (collision.gameObject.name == "floorA")
{
if (lastAgentHit == 0 || lastAgentHit == -1)
{
m_AgentA.AddReward(-0.01f);
m_AgentB.SetReward(0);
m_AgentB.score += 1;
}
else
{
m_AgentA.AddReward(-0.01f);
m_AgentB.SetReward(0);
m_AgentB.score += 1;
}
}
else if (collision.gameObject.name == "floorB")
{
if (lastAgentHit == 1 || lastAgentHit == -1)
{
m_AgentA.SetReward(0);
m_AgentB.AddReward(-0.01f);
m_AgentA.score += 1;
}
else
{
m_AgentA.SetReward(0);
m_AgentB.AddReward(-0.01f);
m_AgentA.score += 1;
}
}
else if (collision.gameObject.name == "net")
{
if (lastAgentHit == 0)
{
m_AgentA.AddReward(-0.01f);
m_AgentB.SetReward(0);
m_AgentB.score += 1;
}
else
{
m_AgentA.SetReward(0);
m_AgentB.AddReward(-0.01f);
m_AgentA.score += 1;
}
}
m_AgentA.Done();
m_AgentB.Done();
m_Area.MatchReset();
}
if (collision.gameObject.CompareTag("agent"))
{
lastAgentHit = collision.gameObject.name == "AgentA" ? 0 : 1;
}
}
}
| 126 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class TennisAcademy : Academy
{
public override void AcademyReset()
{
Physics.gravity = new Vector3(0, -resetParameters["gravity"], 0);
}
public override void AcademyStep()
{
}
}
| 15 |
ml-agents | openai | C# | using UnityEngine;
using UnityEngine.UI;
using MLAgents;
public class TennisAgent : Agent
{
[Header("Specific to Tennis")]
public GameObject ball;
public bool invertX;
public int score;
public GameObject myArea;
public float angle;
public float scale;
private Text m_TextComponent;
private Rigidbody m_AgentRb;
private Rigidbody m_BallRb;
private float m_InvertMult;
private ResetParameters m_ResetParams;
// Looks for the scoreboard based on the name of the gameObjects.
// Do not modify the names of the Score GameObjects
private const string k_CanvasName = "Canvas";
private const string k_ScoreBoardAName = "ScoreA";
private const string k_ScoreBoardBName = "ScoreB";
public override void InitializeAgent()
{
m_AgentRb = GetComponent<Rigidbody>();
m_BallRb = ball.GetComponent<Rigidbody>();
var canvas = GameObject.Find(k_CanvasName);
GameObject scoreBoard;
var academy = FindObjectOfType<Academy>();
m_ResetParams = academy.resetParameters;
if (invertX)
{
scoreBoard = canvas.transform.Find(k_ScoreBoardBName).gameObject;
}
else
{
scoreBoard = canvas.transform.Find(k_ScoreBoardAName).gameObject;
}
m_TextComponent = scoreBoard.GetComponent<Text>();
SetResetParameters();
}
public override void CollectObservations()
{
AddVectorObs(m_InvertMult * (transform.position.x - myArea.transform.position.x));
AddVectorObs(transform.position.y - myArea.transform.position.y);
AddVectorObs(m_InvertMult * m_AgentRb.velocity.x);
AddVectorObs(m_AgentRb.velocity.y);
AddVectorObs(m_InvertMult * (ball.transform.position.x - myArea.transform.position.x));
AddVectorObs(ball.transform.position.y - myArea.transform.position.y);
AddVectorObs(m_InvertMult * m_BallRb.velocity.x);
AddVectorObs(m_BallRb.velocity.y);
}
public override void AgentAction(float[] vectorAction, string textAction)
{
var moveX = Mathf.Clamp(vectorAction[0], -1f, 1f) * m_InvertMult;
var moveY = Mathf.Clamp(vectorAction[1], -1f, 1f);
if (moveY > 0.5 && transform.position.y - transform.parent.transform.position.y < -1.5f)
{
m_AgentRb.velocity = new Vector3(m_AgentRb.velocity.x, 7f, 0f);
}
m_AgentRb.velocity = new Vector3(moveX * 30f, m_AgentRb.velocity.y, 0f);
if (invertX && transform.position.x - transform.parent.transform.position.x < -m_InvertMult ||
!invertX && transform.position.x - transform.parent.transform.position.x > -m_InvertMult)
{
transform.position = new Vector3(-m_InvertMult + transform.parent.transform.position.x,
transform.position.y,
transform.position.z);
}
m_TextComponent.text = score.ToString();
}
public override void AgentReset()
{
m_InvertMult = invertX ? -1f : 1f;
transform.position = new Vector3(-m_InvertMult * Random.Range(6f, 8f), -1.5f, -3.5f) + transform.parent.transform.position;
m_AgentRb.velocity = new Vector3(0f, 0f, 0f);
SetResetParameters();
}
public void SetRacket()
{
angle = m_ResetParams["angle"];
gameObject.transform.eulerAngles = new Vector3(
gameObject.transform.eulerAngles.x,
gameObject.transform.eulerAngles.y,
m_InvertMult * angle
);
}
public void SetBall()
{
scale = m_ResetParams["scale"];
ball.transform.localScale = new Vector3(scale, scale, scale);
}
public void SetResetParameters()
{
SetRacket();
SetBall();
}
}
| 115 |
ml-agents | openai | C# | using UnityEngine;
public class TennisArea : MonoBehaviour
{
public GameObject ball;
public GameObject agentA;
public GameObject agentB;
private Rigidbody m_BallRb;
// Use this for initialization
void Start()
{
m_BallRb = ball.GetComponent<Rigidbody>();
MatchReset();
}
public void MatchReset()
{
var ballOut = Random.Range(6f, 8f);
var flip = Random.Range(0, 2);
if (flip == 0)
{
ball.transform.position = new Vector3(-ballOut, 6f, 0f) + transform.position;
}
else
{
ball.transform.position = new Vector3(ballOut, 6f, 0f) + transform.position;
}
m_BallRb.velocity = new Vector3(0f, 0f, 0f);
ball.transform.localScale = new Vector3(1, 1, 1);
ball.GetComponent<HitWall>().lastAgentHit = -1;
}
void FixedUpdate()
{
var rgV = m_BallRb.velocity;
m_BallRb.velocity = new Vector3(Mathf.Clamp(rgV.x, -9f, 9f), Mathf.Clamp(rgV.y, -9f, 9f), rgV.z);
}
}
| 40 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class WalkerAcademy : Academy
{
public override void InitializeAcademy()
{
Monitor.verticalOffset = 1f;
// We increase the Physics solver iterations in order to
// make walker joint calculations more accurate.
Physics.defaultSolverIterations = 12;
Physics.defaultSolverVelocityIterations = 12;
Time.fixedDeltaTime = 0.01333f; // 75 fps; the Unity default is 0.02 (50 fps)
Time.maximumDeltaTime = .15f; // Default is .33
}
public override void AcademyReset()
{
Physics.gravity = new Vector3(0, -resetParameters["gravity"], 0);
}
public override void AcademyStep()
{
}
}
| 27 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class WalkerAgent : Agent
{
[Header("Specific to Walker")][Header("Target To Walk Towards")][Space(10)]
public Transform target;
Vector3 m_DirToTarget;
public Transform hips;
public Transform chest;
public Transform spine;
public Transform head;
public Transform thighL;
public Transform shinL;
public Transform footL;
public Transform thighR;
public Transform shinR;
public Transform footR;
public Transform armL;
public Transform forearmL;
public Transform handL;
public Transform armR;
public Transform forearmR;
public Transform handR;
JointDriveController m_JdController;
bool m_IsNewDecisionStep;
int m_CurrentDecisionStep;
private Rigidbody m_HipsRb;
private Rigidbody m_ChestRb;
private Rigidbody m_SpineRb;
private ResetParameters m_ResetParams;
public override void InitializeAgent()
{
m_JdController = GetComponent<JointDriveController>();
m_JdController.SetupBodyPart(hips);
m_JdController.SetupBodyPart(chest);
m_JdController.SetupBodyPart(spine);
m_JdController.SetupBodyPart(head);
m_JdController.SetupBodyPart(thighL);
m_JdController.SetupBodyPart(shinL);
m_JdController.SetupBodyPart(footL);
m_JdController.SetupBodyPart(thighR);
m_JdController.SetupBodyPart(shinR);
m_JdController.SetupBodyPart(footR);
m_JdController.SetupBodyPart(armL);
m_JdController.SetupBodyPart(forearmL);
m_JdController.SetupBodyPart(handL);
m_JdController.SetupBodyPart(armR);
m_JdController.SetupBodyPart(forearmR);
m_JdController.SetupBodyPart(handR);
m_HipsRb = hips.GetComponent<Rigidbody>();
m_ChestRb = chest.GetComponent<Rigidbody>();
m_SpineRb = spine.GetComponent<Rigidbody>();
var academy = FindObjectOfType<WalkerAcademy>();
m_ResetParams = academy.resetParameters;
SetResetParameters();
}
/// <summary>
/// Add relevant information on each body part to observations.
/// </summary>
public void CollectObservationBodyPart(BodyPart bp)
{
var rb = bp.rb;
AddVectorObs(bp.groundContact.touchingGround ? 1 : 0); // Is this bp touching the ground
AddVectorObs(rb.velocity);
AddVectorObs(rb.angularVelocity);
var localPosRelToHips = hips.InverseTransformPoint(rb.position);
AddVectorObs(localPosRelToHips);
if (bp.rb.transform != hips && bp.rb.transform != handL && bp.rb.transform != handR &&
bp.rb.transform != footL && bp.rb.transform != footR && bp.rb.transform != head)
{
AddVectorObs(bp.currentXNormalizedRot);
AddVectorObs(bp.currentYNormalizedRot);
AddVectorObs(bp.currentZNormalizedRot);
AddVectorObs(bp.currentStrength / m_JdController.maxJointForceLimit);
}
}
/// <summary>
/// Loop over body parts to add them to observation.
/// </summary>
public override void CollectObservations()
{
m_JdController.GetCurrentJointForces();
AddVectorObs(m_DirToTarget.normalized);
AddVectorObs(m_JdController.bodyPartsDict[hips].rb.position);
AddVectorObs(hips.forward);
AddVectorObs(hips.up);
foreach (var bodyPart in m_JdController.bodyPartsDict.Values)
{
CollectObservationBodyPart(bodyPart);
}
}
public override void AgentAction(float[] vectorAction, string textAction)
{
m_DirToTarget = target.position - m_JdController.bodyPartsDict[hips].rb.position;
// Apply action to all relevant body parts.
if (m_IsNewDecisionStep)
{
var bpDict = m_JdController.bodyPartsDict;
var i = -1;
bpDict[chest].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[spine].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[thighL].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[thighR].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[shinL].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[shinR].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[footR].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[footL].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[armL].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[armR].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[forearmL].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[forearmR].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[head].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
//update joint strength settings
bpDict[chest].SetJointStrength(vectorAction[++i]);
bpDict[spine].SetJointStrength(vectorAction[++i]);
bpDict[head].SetJointStrength(vectorAction[++i]);
bpDict[thighL].SetJointStrength(vectorAction[++i]);
bpDict[shinL].SetJointStrength(vectorAction[++i]);
bpDict[footL].SetJointStrength(vectorAction[++i]);
bpDict[thighR].SetJointStrength(vectorAction[++i]);
bpDict[shinR].SetJointStrength(vectorAction[++i]);
bpDict[footR].SetJointStrength(vectorAction[++i]);
bpDict[armL].SetJointStrength(vectorAction[++i]);
bpDict[forearmL].SetJointStrength(vectorAction[++i]);
bpDict[armR].SetJointStrength(vectorAction[++i]);
bpDict[forearmR].SetJointStrength(vectorAction[++i]);
}
IncrementDecisionTimer();
// Set reward for this step according to mixture of the following elements.
// a. Velocity alignment with goal direction.
// b. Rotation alignment with goal direction.
// c. Encourage head height.
// d. Discourage head movement.
AddReward(
+0.03f * Vector3.Dot(m_DirToTarget.normalized, m_JdController.bodyPartsDict[hips].rb.velocity)
+ 0.01f * Vector3.Dot(m_DirToTarget.normalized, hips.forward)
+ 0.02f * (head.position.y - hips.position.y)
- 0.01f * Vector3.Distance(m_JdController.bodyPartsDict[head].rb.velocity,
m_JdController.bodyPartsDict[hips].rb.velocity)
);
}
/// <summary>
/// Only change the joint settings based on decision frequency.
/// </summary>
public void IncrementDecisionTimer()
{
if (m_CurrentDecisionStep == agentParameters.numberOfActionsBetweenDecisions ||
agentParameters.numberOfActionsBetweenDecisions == 1)
{
m_CurrentDecisionStep = 1;
m_IsNewDecisionStep = true;
}
else
{
m_CurrentDecisionStep++;
m_IsNewDecisionStep = false;
}
}
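// Illustrative note (not part of the original file): with
// agentParameters.numberOfActionsBetweenDecisions = 5, the flag is re-armed once
// m_CurrentDecisionStep reaches 5, so new joint targets are applied once every 5 agent steps
// (steps 1, 6, 11, ...); a value of 1 makes every step a decision step.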
/// <summary>
/// Loop over body parts and reset them to initial conditions.
/// </summary>
public override void AgentReset()
{
if (m_DirToTarget != Vector3.zero)
{
transform.rotation = Quaternion.LookRotation(m_DirToTarget);
}
foreach (var bodyPart in m_JdController.bodyPartsDict.Values)
{
bodyPart.Reset(bodyPart);
}
m_IsNewDecisionStep = true;
m_CurrentDecisionStep = 1;
SetResetParameters();
}
public void SetTorsoMass()
{
m_ChestRb.mass = m_ResetParams["chest_mass"];
m_SpineRb.mass = m_ResetParams["spine_mass"];
m_HipsRb.mass = m_ResetParams["hip_mass"];
}
public void SetResetParameters()
{
SetTorsoMass();
}
}
| 215 |
ml-agents | openai | C# | using UnityEngine;
using MLAgents;
public class WallJumpAcademy : Academy
{
[Header("Specific to WallJump")]
public float agentRunSpeed;
public float agentJumpHeight;
//when a goal is scored the ground will use this material for a few seconds.
public Material goalScoredMaterial;
//when fail, the ground will use this material for a few seconds.
public Material failMaterial;
[HideInInspector]
//use ~3 to make things less floaty
public float gravityMultiplier = 2.5f;
[HideInInspector]
public float agentJumpVelocity = 777;
[HideInInspector]
public float agentJumpVelocityMaxChange = 10;
// Use this for initialization
public override void InitializeAcademy()
{
Physics.gravity *= gravityMultiplier;
}
}
| 28 |
ml-agents | openai | C# | //Put this script on your blue cube.
using System.Collections;
using UnityEngine;
using MLAgents;
public class WallJumpAgent : Agent
{
// Depending on this value, the wall will have different height
int m_Configuration;
// Brain to use when no wall is present
public Brain noWallBrain;
// Brain to use when a jumpable wall is present
public Brain smallWallBrain;
// Brain to use when a wall requiring a block to jump over is present
public Brain bigWallBrain;
public GameObject ground;
public GameObject spawnArea;
Bounds m_SpawnAreaBounds;
public GameObject goal;
public GameObject shortBlock;
public GameObject wall;
Rigidbody m_ShortBlockRb;
Rigidbody m_AgentRb;
Material m_GroundMaterial;
Renderer m_GroundRenderer;
WallJumpAcademy m_Academy;
RayPerception m_RayPer;
public float jumpingTime;
public float jumpTime;
// This is a downward force applied when falling to make jumps look
// less floaty
public float fallingForce;
// Used to check the colliding objects
public Collider[] hitGroundColliders = new Collider[3];
Vector3 m_JumpTargetPos;
Vector3 m_JumpStartingPos;
string[] m_DetectableObjects;
public override void InitializeAgent()
{
m_Academy = FindObjectOfType<WallJumpAcademy>();
m_RayPer = GetComponent<RayPerception>();
m_Configuration = Random.Range(0, 5);
m_DetectableObjects = new[] { "wall", "goal", "block" };
m_AgentRb = GetComponent<Rigidbody>();
m_ShortBlockRb = shortBlock.GetComponent<Rigidbody>();
m_SpawnAreaBounds = spawnArea.GetComponent<Collider>().bounds;
m_GroundRenderer = ground.GetComponent<Renderer>();
m_GroundMaterial = m_GroundRenderer.material;
spawnArea.SetActive(false);
}
// Begin the jump sequence
public void Jump()
{
jumpingTime = 0.2f;
m_JumpStartingPos = m_AgentRb.position;
}
/// <summary>
/// Does the ground check.
/// </summary>
/// <returns><c>true</c>, if the agent is on the ground,
/// <c>false</c> otherwise.</returns>
/// <param name="smallCheck"></param>
public bool DoGroundCheck(bool smallCheck)
{
if (!smallCheck)
{
hitGroundColliders = new Collider[3];
var o = gameObject;
Physics.OverlapBoxNonAlloc(
o.transform.position + new Vector3(0, -0.05f, 0),
new Vector3(0.95f / 2f, 0.5f, 0.95f / 2f),
hitGroundColliders,
o.transform.rotation);
var grounded = false;
foreach (var col in hitGroundColliders)
{
if (col != null && col.transform != transform &&
(col.CompareTag("walkableSurface") ||
col.CompareTag("block") ||
col.CompareTag("wall")))
{
grounded = true; //then we're grounded
break;
}
}
return grounded;
}
else
{
RaycastHit hit;
Physics.Raycast(transform.position + new Vector3(0, -0.05f, 0), -Vector3.up, out hit,
1f);
if (hit.collider != null &&
(hit.collider.CompareTag("walkableSurface") ||
hit.collider.CompareTag("block") ||
hit.collider.CompareTag("wall"))
&& hit.normal.y > 0.95f)
{
return true;
}
return false;
}
}
/// <summary>
/// Moves a rigidbody towards a position smoothly.
/// </summary>
/// <param name="targetPos">Target position.</param>
/// <param name="rb">The rigidbody to be moved.</param>
/// <param name="targetVel">The velocity to target during the
/// motion.</param>
/// <param name="maxVel">The maximum velocity posible.</param>
void MoveTowards(
Vector3 targetPos, Rigidbody rb, float targetVel, float maxVel)
{
var moveToPos = targetPos - rb.worldCenterOfMass;
var velocityTarget = Time.fixedDeltaTime * targetVel * moveToPos;
if (float.IsNaN(velocityTarget.x) == false)
{
rb.velocity = Vector3.MoveTowards(
rb.velocity, velocityTarget, maxVel);
}
}
public override void CollectObservations()
{
var rayDistance = 20f;
float[] rayAngles = { 0f, 45f, 90f, 135f, 180f, 110f, 70f };
AddVectorObs(m_RayPer.Perceive(
rayDistance, rayAngles, m_DetectableObjects, 0f, 0f));
AddVectorObs(m_RayPer.Perceive(
rayDistance, rayAngles, m_DetectableObjects, 2.5f, 2.5f));
var agentPos = m_AgentRb.position - ground.transform.position;
AddVectorObs(agentPos / 20f);
AddVectorObs(DoGroundCheck(true) ? 1 : 0);
}
/// <summary>
/// Gets a random spawn position in the spawningArea.
/// </summary>
/// <returns>The random spawn position.</returns>
public Vector3 GetRandomSpawnPos()
{
var randomPosX = Random.Range(-m_SpawnAreaBounds.extents.x,
m_SpawnAreaBounds.extents.x);
var randomPosZ = Random.Range(-m_SpawnAreaBounds.extents.z,
m_SpawnAreaBounds.extents.z);
var randomSpawnPos = spawnArea.transform.position +
new Vector3(randomPosX, 0.45f, randomPosZ);
return randomSpawnPos;
}
/// <summary>
/// Changes the color of the ground for a moment.
/// </summary>
/// <returns>The Enumerator to be used in a Coroutine</returns>
/// <param name="mat">The material to be swaped.</param>
/// <param name="time">The time the material will remain.</param>
IEnumerator GoalScoredSwapGroundMaterial(Material mat, float time)
{
m_GroundRenderer.material = mat;
yield return new WaitForSeconds(time); // wait for the requested duration
m_GroundRenderer.material = m_GroundMaterial;
}
public void MoveAgent(float[] act)
{
AddReward(-0.0005f);
var smallGrounded = DoGroundCheck(true);
var largeGrounded = DoGroundCheck(false);
var dirToGo = Vector3.zero;
var rotateDir = Vector3.zero;
var dirToGoForwardAction = (int)act[0];
var rotateDirAction = (int)act[1];
var dirToGoSideAction = (int)act[2];
var jumpAction = (int)act[3];
if (dirToGoForwardAction == 1)
dirToGo = (largeGrounded ? 1f : 0.5f) * 1f * transform.forward;
else if (dirToGoForwardAction == 2)
dirToGo = (largeGrounded ? 1f : 0.5f) * -1f * transform.forward;
if (rotateDirAction == 1)
rotateDir = transform.up * -1f;
else if (rotateDirAction == 2)
rotateDir = transform.up * 1f;
if (dirToGoSideAction == 1)
dirToGo = (largeGrounded ? 1f : 0.5f) * -0.6f * transform.right;
else if (dirToGoSideAction == 2)
dirToGo = (largeGrounded ? 1f : 0.5f) * 0.6f * transform.right;
if (jumpAction == 1)
if ((jumpingTime <= 0f) && smallGrounded)
{
Jump();
}
transform.Rotate(rotateDir, Time.fixedDeltaTime * 300f);
m_AgentRb.AddForce(dirToGo * m_Academy.agentRunSpeed,
ForceMode.VelocityChange);
if (jumpingTime > 0f)
{
m_JumpTargetPos =
new Vector3(m_AgentRb.position.x,
m_JumpStartingPos.y + m_Academy.agentJumpHeight,
m_AgentRb.position.z) + dirToGo;
MoveTowards(m_JumpTargetPos, m_AgentRb, m_Academy.agentJumpVelocity,
m_Academy.agentJumpVelocityMaxChange);
}
if (!(jumpingTime > 0f) && !largeGrounded)
{
m_AgentRb.AddForce(
Vector3.down * fallingForce, ForceMode.Acceleration);
}
jumpingTime -= Time.fixedDeltaTime;
}
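// Illustrative note (not part of the original file): the four discrete branches are decoded
// independently, so act = { 1, 0, 0, 1 } means "move forward and jump if grounded", while
// act = { 0, 2, 0, 0 } only rotates the agent in the positive direction around transform.up.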
public override void AgentAction(float[] vectorAction, string textAction)
{
MoveAgent(vectorAction);
if ((!Physics.Raycast(m_AgentRb.position, Vector3.down, 20))
|| (!Physics.Raycast(m_ShortBlockRb.position, Vector3.down, 20)))
{
Done();
SetReward(-1f);
ResetBlock(m_ShortBlockRb);
StartCoroutine(
GoalScoredSwapGroundMaterial(m_Academy.failMaterial, .5f));
}
}
// Detect when the agent hits the goal
void OnTriggerStay(Collider col)
{
if (col.gameObject.CompareTag("goal") && DoGroundCheck(true))
{
SetReward(1f);
Done();
StartCoroutine(
GoalScoredSwapGroundMaterial(m_Academy.goalScoredMaterial, 2));
}
}
//Reset the orange block position
void ResetBlock(Rigidbody blockRb)
{
blockRb.transform.position = GetRandomSpawnPos();
blockRb.velocity = Vector3.zero;
blockRb.angularVelocity = Vector3.zero;
}
public override void AgentReset()
{
ResetBlock(m_ShortBlockRb);
transform.localPosition = new Vector3(
18 * (Random.value - 0.5f), 1, -12);
m_Configuration = Random.Range(0, 5);
m_AgentRb.velocity = default(Vector3);
}
private void FixedUpdate()
{
if (m_Configuration != -1)
{
ConfigureAgent(m_Configuration);
m_Configuration = -1;
}
}
/// <summary>
/// Configures the agent. Given an integer config, the wall will have
/// different height and a different brain will be assigned to the agent.
/// </summary>
/// <param name="config">Config.
/// If 0 : No wall and noWallBrain.
/// If 1: Small wall and smallWallBrain.
/// Other : Tall wall and BigWallBrain. </param>
void ConfigureAgent(int config)
{
var localScale = wall.transform.localScale;
if (config == 0)
{
localScale = new Vector3(
localScale.x,
m_Academy.resetParameters["no_wall_height"],
localScale.z);
wall.transform.localScale = localScale;
GiveBrain(noWallBrain);
}
else if (config == 1)
{
localScale = new Vector3(
localScale.x,
m_Academy.resetParameters["small_wall_height"],
localScale.z);
wall.transform.localScale = localScale;
GiveBrain(smallWallBrain);
}
else
{
var height =
m_Academy.resetParameters["big_wall_min_height"] +
Random.value * (m_Academy.resetParameters["big_wall_max_height"] -
m_Academy.resetParameters["big_wall_min_height"]);
localScale = new Vector3(
localScale.x,
height,
localScale.z);
wall.transform.localScale = localScale;
GiveBrain(bigWallBrain);
}
}
}
| 330 |
ml-agents | openai | C# | using System.IO;
using UnityEditor;
using UnityEngine;
using UnityEditor.Experimental.AssetImporters;
namespace Barracuda
{
/// <summary>
/// Asset Importer of barracuda models.
/// </summary>
[ScriptedImporter(1, new[] {"nn"})]
public class NNModelImporter : ScriptedImporter
{
private const string k_IconName = "NNModelIcon";
private Texture2D m_IconTexture;
public override void OnImportAsset(AssetImportContext ctx)
{
var model = File.ReadAllBytes(ctx.assetPath);
var asset = ScriptableObject.CreateInstance<NNModel>();
asset.Value = model;
ctx.AddObjectToAsset("main obj", asset, LoadIconTexture());
ctx.SetMainObject(asset);
}
private Texture2D LoadIconTexture()
{
if (m_IconTexture == null)
{
var allCandidates = AssetDatabase.FindAssets(k_IconName);
if (allCandidates.Length > 0)
{
m_IconTexture = AssetDatabase.LoadAssetAtPath(AssetDatabase.GUIDToAssetPath(allCandidates[0]), typeof(Texture2D)) as Texture2D;
}
}
return m_IconTexture;
}
}
}
| 43 |
ml-agents | openai | C# | #if UNITY_IOS
using System.Runtime.InteropServices;
using Barracuda;
using UnityEngine;
using UnityEngine.Scripting;
[Preserve]
public class iOSBLAS : BLASPlugin
{
[DllImport("__Internal")]
static extern unsafe void iossgemm(float* Ap, int AN, int AM,
float* Bp, int BN, int BM,
float* Cp, int CN, int CM,
int bs, bool transposeA, bool transposeB);
public bool IsCurrentPlatformSupported()
{
return Application.platform == RuntimePlatform.IPhonePlayer;
}
public unsafe void SGEMM(float* Ap, int AN, int AM, float* Bp, int BN, int BM, float* Cp, int CN, int CM, int bs,
bool transposeA = false, bool transposeB = false)
{
iossgemm(Ap, AN, AM, Bp, BN, BM, Cp, CN, CM, bs, transposeA, transposeB);
}
}
#endif // UNITY_IOS
| 28 |
ml-agents | openai | C# | #if UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX
using System.Runtime.InteropServices;
using Barracuda;
using UnityEngine;
using UnityEngine.Scripting;
[Preserve]
public class MacBLAS : BLASPlugin
{
[DllImport("macblas")]
static extern unsafe void macsgemm(float* ap, int an, int am,
float* bp, int bn, int bm,
float* cp, int cn, int cm,
int bs, bool transposeA, bool transposeB);
public bool IsCurrentPlatformSupported()
{
return Application.platform == RuntimePlatform.OSXEditor ||
Application.platform == RuntimePlatform.OSXPlayer;
}
public unsafe void SGEMM(float* ap, int an, int am, float* bp, int bn, int bm, float* cp, int cn, int cm, int bs,
bool transposeA = false, bool transposeB = false)
{
macsgemm(ap, an, am, bp, bn, bm, cp, cn, cm, bs, transposeA, transposeB);
}
}
#endif // UNITY_OSX
| 30 |
ml-agents | openai | C# | using UnityEngine;
using System.IO;
using System.Linq;
using UnityEngine.Serialization;
#if UNITY_EDITOR
using UnityEditor;
#endif
/**
* Welcome to Unity Machine Learning Agents (ML-Agents).
*
* The ML-Agents toolkit contains five entities: Academy, Brain, Agent, Communicator and
* Python API. The academy, and all its brains and connected agents live within
* a learning environment (herein called Environment), while the communicator
* manages the communication between the learning environment and the Python
* API. For more information on each of these entities, in addition to how to
* set-up a learning environment and train the behavior of characters in a
* Unity scene, please browse our documentation pages on GitHub:
* https://github.com/Unity-Technologies/ml-agents/blob/master/docs/
*/
namespace MLAgents
{
/// <summary>
/// Wraps the environment-level parameters that are provided within the
/// Editor. These parameters can be provided for training and inference
/// modes separately and represent screen resolution, rendering quality and
/// frame rate.
/// </summary>
[System.Serializable]
public class EnvironmentConfiguration
{
[Tooltip("Width of the environment window in pixels.")]
public int width;
[Tooltip("Height of the environment window in pixels.")]
public int height;
[Tooltip("Rendering quality of environment. (Higher is better quality.)")]
[Range(0, 5)]
public int qualityLevel;
[Tooltip("Speed at which environment is run. (Higher is faster.)")]
[Range(1f, 100f)]
public float timeScale;
[Tooltip("Frames per second (FPS) engine attempts to maintain.")]
public int targetFrameRate;
/// Initializes a new instance of the
/// <see cref="EnvironmentConfiguration"/> class.
/// <param name="width">Width of environment window (pixels).</param>
/// <param name="height">Height of environment window (pixels).</param>
/// <param name="qualityLevel">
/// Rendering quality of environment. Ranges from 0 to 5, with higher values indicating better quality.
/// </param>
/// <param name="timeScale">
/// Speed at which environment is run. Ranges from 1 to 100, with higher
/// values representing faster speed.
/// </param>
/// <param name="targetFrameRate">
/// Target frame rate (per second) that the engine tries to maintain.
/// </param>
public EnvironmentConfiguration(
int width, int height, int qualityLevel,
float timeScale, int targetFrameRate)
{
this.width = width;
this.height = height;
this.qualityLevel = qualityLevel;
this.timeScale = timeScale;
this.targetFrameRate = targetFrameRate;
}
}
/// <summary>
/// An Academy is where Agent objects go to train their behaviors. More
/// specifically, an academy is a collection of Brain objects and each agent
/// in a scene is attached to one brain (a single brain may be attached to
/// multiple agents). Currently, this class is expected to be extended to
/// implement the desired academy behavior.
/// </summary>
/// <remarks>
/// When an academy is run, it can either be in inference or training mode.
/// The mode is determined by the presence or absence of a Communicator. In
/// the presence of a communicator, the academy is run in training mode where
/// the states and observations of each agent are sent through the
/// communicator. In the absence of a communicator, the academy is run in
/// inference mode where the agent behavior is determined by the brain
/// attached to it (which may be internal, heuristic or player).
/// </remarks>
[HelpURL("https://github.com/Unity-Technologies/ml-agents/blob/master/" +
"docs/Learning-Environment-Design-Academy.md")]
public abstract class Academy : MonoBehaviour
{
[SerializeField]
public BroadcastHub broadcastHub = new BroadcastHub();
private const string k_KApiVersion = "API-10";
/// Temporary storage for global gravity value
/// Used to restore original value when deriving Academy modifies it
private Vector3 m_OriginalGravity;
/// Temporary storage for global fixedDeltaTime value
/// Used to restore original value when deriving Academy modifies it
private float m_OriginalFixedDeltaTime;
/// Temporary storage for global maximumDeltaTime value
/// Used to restore original value when deriving Academy modifies it
private float m_OriginalMaximumDeltaTime;
// Fields provided in the Inspector
[FormerlySerializedAs("maxSteps")]
[SerializeField]
[Tooltip("The engine-level settings which correspond to rendering " +
"quality and engine speed during Training.")]
EnvironmentConfiguration m_TrainingConfiguration =
new EnvironmentConfiguration(80, 80, 1, 100.0f, -1);
[FormerlySerializedAs("inferenceConfiguration")]
[SerializeField]
[Tooltip("The engine-level settings which correspond to rendering " +
"quality and engine speed during Inference.")]
EnvironmentConfiguration m_InferenceConfiguration =
new EnvironmentConfiguration(1280, 720, 5, 1.0f, 60);
/// <summary>
/// Contains a mapping from parameter names to float values. They are
/// used in <see cref="AcademyReset"/> and <see cref="AcademyStep"/>
/// to modify elements in the environment at reset time.
/// </summary>
/// <remarks>
/// Default reset parameters are specified in the academy Editor, and can
/// be modified when training with an external Brain by passing a config
/// dictionary at reset.
/// </remarks>
[SerializeField]
[Tooltip("List of custom parameters that can be changed in the " +
"environment when it resets.")]
public ResetParameters resetParameters;
public CommunicatorObjects.CustomResetParameters customResetParameters;
// Fields not provided in the Inspector.
/// Boolean flag indicating whether a communicator is accessible by the
/// environment. This also specifies whether the environment is in
/// Training or Inference mode.
bool m_IsCommunicatorOn;
/// Keeps track of the id of the last communicator message received.
/// Remains 0 if there are no communicators. Is used to ensure that
/// the same message is not used multiple times.
private ulong m_LastCommunicatorMessageNumber;
/// If true, the Academy will use inference settings. This field is
/// initialized in <see cref="Awake"/> depending on the presence
/// or absence of a communicator. Furthermore, it can be modified by an
/// external Brain during reset via <see cref="SetIsInference"/>.
bool m_IsInference = true;
/// The number of episodes completed by the environment. Incremented
/// each time the environment is reset.
int m_EpisodeCount;
        /// The number of steps completed within the current episode. Incremented
/// each time a step is taken in the environment. Is reset to 0 during
/// <see cref="AcademyReset"/>.
int m_StepCount;
        /// The total number of steps completed during the whole simulation. Incremented
/// each time a step is taken in the environment.
int m_TotalStepCount;
/// Flag that indicates whether the inference/training mode of the
/// environment was switched by the external Brain. This impacts the
/// engine settings at the next environment step.
bool m_ModeSwitched;
/// Pointer to the batcher currently in use by the Academy.
Batcher m_BrainBatcher;
// Flag used to keep track of the first time the Academy is reset.
bool m_FirstAcademyReset;
// The Academy uses a series of events to communicate with agents and
        // brains to facilitate synchronization. More specifically, it ensures
        // that all the agents perform their steps in a consistent order (i.e. no
// agent can act based on a decision before another agent has had a chance
// to request a decision).
// Signals to all the Brains at each environment step so they can decide
// actions for their agents.
public event System.Action BrainDecideAction;
// Signals to all the listeners that the academy is being destroyed
public event System.Action DestroyAction;
        // Signals to all the agents at each environment step, passing along the
        // Academy's current step count. The agents rely on this event to align
        // on the step count of the global episode and to decide whether to
        // request a decision or an action for the upcoming step.
public event System.Action<int> AgentSetStatus;
// Signals to all the agents at each environment step so they can reset
// if their flag has been set to done (assuming the agent has requested a
// decision).
public event System.Action AgentResetIfDone;
// Signals to all the agents at each environment step so they can send
// their state to their Brain if they have requested a decision.
public event System.Action AgentSendState;
// Signals to all the agents at each environment step so they can act if
// they have requested a decision.
public event System.Action AgentAct;
        // Signals to all the agents each time the Academy force resets.
public event System.Action AgentForceReset;
/// <summary>
/// Monobehavior function called at the very beginning of environment
/// creation. Academy uses this time to initialize internal data
/// structures, initialize the environment and check for the existence
/// of a communicator.
/// </summary>
void Awake()
{
InitializeEnvironment();
}
// Used to read Python-provided environment parameters
private int ReadArgs()
{
var args = System.Environment.GetCommandLineArgs();
var inputPort = "";
for (var i = 0; i < args.Length; i++)
{
if (args[i] == "--port")
{
inputPort = args[i + 1];
}
}
return int.Parse(inputPort);
}
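        // Note: the "--port" flag parsed above is supplied by the external (Python)
        // process when it launches a built environment. A standalone build started by
        // hand could pass it the same way, e.g. (path and port are illustrative only):
        //   ./MyEnvironment.x86_64 --port 5005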
/// <summary>
        /// Initializes the environment, configures it and initializes the Academy.
/// </summary>
private void InitializeEnvironment()
{
m_OriginalGravity = Physics.gravity;
m_OriginalFixedDeltaTime = Time.fixedDeltaTime;
m_OriginalMaximumDeltaTime = Time.maximumDeltaTime;
InitializeAcademy();
ICommunicator communicator;
var exposedBrains = broadcastHub.broadcastingBrains.Where(x => x != null).ToList();
var controlledBrains = broadcastHub.broadcastingBrains.Where(
x => x != null && x is LearningBrain && broadcastHub.IsControlled(x));
foreach (var brain1 in controlledBrains)
{
var brain = (LearningBrain)brain1;
brain.SetToControlledExternally();
}
            // Try to launch the communicator by using the arguments passed at launch
try
{
communicator = new RpcCommunicator(
new CommunicatorParameters
{
port = ReadArgs()
});
}
// If it fails, we check if there are any external brains in the scene
// If there are : Launch the communicator on the default port
            // If there aren't, there is no need for a communicator and it is set
// to null
catch
{
communicator = null;
if (controlledBrains.ToList().Count > 0)
{
communicator = new RpcCommunicator(
new CommunicatorParameters
{
port = 5005
});
}
}
m_BrainBatcher = new Batcher(communicator);
foreach (var trainingBrain in exposedBrains)
{
trainingBrain.SetBatcher(m_BrainBatcher);
}
if (communicator != null)
{
m_IsCommunicatorOn = true;
var academyParameters =
new CommunicatorObjects.UnityRLInitializationOutput();
academyParameters.Name = gameObject.name;
academyParameters.Version = k_KApiVersion;
foreach (var brain in exposedBrains)
{
var bp = brain.brainParameters;
academyParameters.BrainParameters.Add(
bp.ToProto(brain.name, broadcastHub.IsControlled(brain)));
}
academyParameters.EnvironmentParameters =
new CommunicatorObjects.EnvironmentParametersProto();
foreach (var key in resetParameters.Keys)
{
academyParameters.EnvironmentParameters.FloatParameters.Add(
key, resetParameters[key]
);
}
var pythonParameters = m_BrainBatcher.SendAcademyParameters(academyParameters);
Random.InitState(pythonParameters.Seed);
}
// If a communicator is enabled/provided, then we assume we are in
// training mode. In the absence of a communicator, we assume we are
// in inference mode.
m_IsInference = !m_IsCommunicatorOn;
BrainDecideAction += () => { };
DestroyAction += () => { };
AgentSetStatus += (i) => { };
AgentResetIfDone += () => { };
AgentSendState += () => { };
AgentAct += () => { };
AgentForceReset += () => { };
// Configure the environment using the configurations provided by
// the developer in the Editor.
SetIsInference(!m_BrainBatcher.GetIsTraining());
ConfigureEnvironment();
}
private void UpdateResetParameters()
{
var newResetParameters = m_BrainBatcher.GetEnvironmentParameters();
if (newResetParameters != null)
{
foreach (var kv in newResetParameters.FloatParameters)
{
resetParameters[kv.Key] = kv.Value;
}
customResetParameters = newResetParameters.CustomResetParameters;
}
}
/// <summary>
/// Configures the environment settings depending on the training/inference
/// mode and the corresponding parameters passed in the Editor.
/// </summary>
void ConfigureEnvironment()
{
if (m_IsInference)
{
ConfigureEnvironmentHelper(m_InferenceConfiguration);
Monitor.SetActive(true);
}
else
{
ConfigureEnvironmentHelper(m_TrainingConfiguration);
Monitor.SetActive(false);
}
}
/// <summary>
/// Helper method for initializing the environment based on the provided
/// configuration.
/// </summary>
/// <param name="config">
/// Environment configuration (specified in the Editor).
/// </param>
static void ConfigureEnvironmentHelper(EnvironmentConfiguration config)
{
Screen.SetResolution(config.width, config.height, false);
QualitySettings.SetQualityLevel(config.qualityLevel, true);
Time.timeScale = config.timeScale;
Time.captureFramerate = 60;
Application.targetFrameRate = config.targetFrameRate;
}
/// <summary>
/// Initializes the academy and environment. Called during the waking-up
/// phase of the environment before any of the scene objects/agents have
/// been initialized.
/// </summary>
public virtual void InitializeAcademy()
{
}
/// <summary>
/// Specifies the academy behavior at every step of the environment.
/// </summary>
public virtual void AcademyStep()
{
}
/// <summary>
/// Specifies the academy behavior when being reset (i.e. at the completion
/// of a global episode).
/// </summary>
public virtual void AcademyReset()
{
}
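        // A minimal sketch of a concrete Academy, assuming a hypothetical scene object
        // "goal" and a "goal_height" entry in resetParameters (both illustrative):
        //
        //   public class MyAcademy : Academy
        //   {
        //       public Transform goal;
        //
        //       public override void AcademyReset()
        //       {
        //           // resetParameters is filled from the Editor and can be overridden
        //           // by the external trainer at reset time.
        //           var height = resetParameters["goal_height"];
        //           goal.position = new Vector3(0f, height, 0f);
        //       }
        //   }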
/// <summary>
/// Returns the <see cref="m_IsInference"/> flag.
/// </summary>
/// <returns>
/// <c>true</c>, if current mode is inference, <c>false</c> if training.
/// </returns>
public bool GetIsInference()
{
return m_IsInference;
}
/// <summary>
/// Sets the <see cref="m_IsInference"/> flag to the provided value. If
/// the new flag differs from the current flag value, this signals that
/// the environment configuration needs to be updated.
/// </summary>
/// <param name="isInference">
/// Environment mode, if true then inference, otherwise training.
/// </param>
public void SetIsInference(bool isInference)
{
if (m_IsInference != isInference)
{
m_IsInference = isInference;
// This signals to the academy that at the next environment step
// the engine configurations need updating to the respective mode
                // (i.e. training vs inference) configuration.
m_ModeSwitched = true;
}
}
/// <summary>
/// Returns the current episode counter.
/// </summary>
/// <returns>
/// Current episode number.
/// </returns>
public int GetEpisodeCount()
{
return m_EpisodeCount;
}
/// <summary>
/// Returns the current step counter (within the current episode).
/// </summary>
/// <returns>
/// Current step count.
/// </returns>
public int GetStepCount()
{
return m_StepCount;
}
/// <summary>
/// Returns the total step counter.
/// </summary>
/// <returns>
/// Total step count.
/// </returns>
public int GetTotalStepCount()
{
return m_TotalStepCount;
}
/// <summary>
/// Returns whether or not the communicator is on.
/// </summary>
/// <returns>
/// <c>true</c>, if communicator is on, <c>false</c> otherwise.
/// </returns>
public bool IsCommunicatorOn()
{
return m_IsCommunicatorOn;
}
/// <summary>
        /// Forces the full reset. The done flags are not affected. It is
        /// called on the first reset in inference mode and on every external
        /// reset in training mode.
/// </summary>
void ForcedFullReset()
{
EnvironmentReset();
AgentForceReset();
m_FirstAcademyReset = true;
}
/// <summary>
/// Performs a single environment update to the Academy, Brain and Agent
/// objects within the environment.
/// </summary>
void EnvironmentStep()
{
if (m_ModeSwitched)
{
ConfigureEnvironment();
m_ModeSwitched = false;
}
if ((m_IsCommunicatorOn) &&
(m_LastCommunicatorMessageNumber != m_BrainBatcher.GetNumberMessageReceived()))
{
m_LastCommunicatorMessageNumber = m_BrainBatcher.GetNumberMessageReceived();
if (m_BrainBatcher.GetCommand() ==
CommunicatorObjects.CommandProto.Reset)
{
UpdateResetParameters();
SetIsInference(!m_BrainBatcher.GetIsTraining());
ForcedFullReset();
}
if (m_BrainBatcher.GetCommand() ==
CommunicatorObjects.CommandProto.Quit)
{
#if UNITY_EDITOR
EditorApplication.isPlaying = false;
#endif
Application.Quit();
return;
}
}
else if (!m_FirstAcademyReset)
{
UpdateResetParameters();
ForcedFullReset();
}
AgentSetStatus(m_StepCount);
AgentResetIfDone();
AgentSendState();
BrainDecideAction();
AcademyStep();
AgentAct();
m_StepCount += 1;
m_TotalStepCount += 1;
}
/// <summary>
/// Resets the environment, including the Academy.
/// </summary>
void EnvironmentReset()
{
m_StepCount = 0;
m_EpisodeCount++;
AcademyReset();
}
/// <summary>
/// Monobehavior function that dictates each environment step.
/// </summary>
void FixedUpdate()
{
EnvironmentStep();
}
/// <summary>
/// Cleanup function
/// </summary>
protected virtual void OnDestroy()
{
Physics.gravity = m_OriginalGravity;
Time.fixedDeltaTime = m_OriginalFixedDeltaTime;
Time.maximumDeltaTime = m_OriginalMaximumDeltaTime;
// Signal to listeners that the academy is being destroyed now
DestroyAction();
}
}
}
using System;
using System.Collections.Generic;
using System.Linq;
namespace MLAgents
{
public class ActionMasker
{
        /// When using discrete control, contains the starting index of each
        /// branch when all the branches are concatenated with each other.
private int[] m_StartingActionIndices;
private bool[] m_CurrentMask;
private readonly BrainParameters m_BrainParameters;
public ActionMasker(BrainParameters brainParameters)
{
m_BrainParameters = brainParameters;
}
/// <summary>
/// Modifies an action mask for discrete control agents. When used, the agent will not be
/// able to perform the action passed as argument at the next decision. If no branch is
/// specified, the default branch will be 0. The actionIndex or actionIndices correspond
/// to the action the agent will be unable to perform.
/// </summary>
/// <param name="branch">The branch for which the actions will be masked</param>
/// <param name="actionIndices">The indices of the masked actions</param>
public void SetActionMask(int branch, IEnumerable<int> actionIndices)
{
// If the branch does not exist, raise an error
if (branch >= m_BrainParameters.vectorActionSize.Length)
throw new UnityAgentsException(
"Invalid Action Masking : Branch " + branch + " does not exist.");
var totalNumberActions = m_BrainParameters.vectorActionSize.Sum();
            // By default, the mask is null. The first time a mask is specified, we
            // initialize it with all entries set to false (i.e. no action masked).
if (m_CurrentMask == null)
{
m_CurrentMask = new bool[totalNumberActions];
}
// If this is the first time the masked actions are used, we generate the starting
// indices for each branch.
if (m_StartingActionIndices == null)
{
m_StartingActionIndices = Utilities.CumSum(m_BrainParameters.vectorActionSize);
}
// Perform the masking
foreach (var actionIndex in actionIndices)
{
if (actionIndex >= m_BrainParameters.vectorActionSize[branch])
{
throw new UnityAgentsException(
"Invalid Action Masking: Action Mask is too large for specified branch.");
}
m_CurrentMask[actionIndex + m_StartingActionIndices[branch]] = true;
}
}
/// <summary>
/// Get the current mask for an agent
/// </summary>
/// <returns>A mask for the agent. A boolean array of length equal to the total number of
/// actions.</returns>
public bool[] GetMask()
{
if (m_CurrentMask != null)
{
AssertMask();
}
return m_CurrentMask;
}
/// <summary>
/// Makes sure that the current mask is usable.
/// </summary>
private void AssertMask()
{
// Action Masks can only be used in Discrete Control.
if (m_BrainParameters.vectorActionSpaceType != SpaceType.Discrete)
{
throw new UnityAgentsException(
"Invalid Action Masking : Can only set action mask for Discrete Control.");
}
var numBranches = m_BrainParameters.vectorActionSize.Length;
for (var branchIndex = 0; branchIndex < numBranches; branchIndex++)
{
if (AreAllActionsMasked(branchIndex))
{
throw new UnityAgentsException(
"Invalid Action Masking : All the actions of branch " + branchIndex +
" are masked.");
}
}
}
/// <summary>
/// Resets the current mask for an agent
/// </summary>
public void ResetMask()
{
if (m_CurrentMask != null)
{
Array.Clear(m_CurrentMask, 0, m_CurrentMask.Length);
}
}
/// <summary>
/// Checks if all the actions in the input branch are masked
/// </summary>
/// <param name="branch"> The index of the branch to check</param>
/// <returns> True if all the actions of the branch are masked</returns>
private bool AreAllActionsMasked(int branch)
{
if (m_CurrentMask == null)
{
return false;
}
var start = m_StartingActionIndices[branch];
var end = m_StartingActionIndices[branch + 1];
for (var i = start; i < end; i++)
{
if (!m_CurrentMask[i])
{
return false;
}
}
return true;
}
}
}
using System.Collections.Generic;
using Google.Protobuf;
using MLAgents.CommunicatorObjects;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Struct that contains all the information for an Agent, including its
/// observations, actions and current status, that is sent to the Brain.
/// </summary>
public struct AgentInfo
{
/// <summary>
/// Most recent agent vector (i.e. numeric) observation.
/// </summary>
public List<float> vectorObservation;
/// <summary>
/// The previous agent vector observations, stacked. The length of the
/// history (i.e. number of vector observations to stack) is specified
/// in the Brain parameters.
/// </summary>
public List<float> stackedVectorObservation;
/// <summary>
/// Most recent agent camera (i.e. texture) observation.
/// </summary>
public List<Texture2D> visualObservations;
/// <summary>
/// Most recent text observation.
/// </summary>
public string textObservation;
/// <summary>
/// Keeps track of the last vector action taken by the Brain.
/// </summary>
public float[] storedVectorActions;
/// <summary>
/// Keeps track of the last text action taken by the Brain.
/// </summary>
public string storedTextActions;
/// <summary>
/// For discrete control, specifies the actions that the agent cannot take. Is true if
/// the action is masked.
/// </summary>
public bool[] actionMasks;
/// <summary>
/// Used by the Trainer to store information about the agent. This data
        /// structure is not consumed or modified by the agent directly; the agent
        /// simply owns its trainer's memory. Currently, however, the
/// size of the memory is in the Brain properties.
/// </summary>
public List<float> memories;
/// <summary>
/// Current agent reward.
/// </summary>
public float reward;
/// <summary>
/// Whether the agent is done or not.
/// </summary>
public bool done;
/// <summary>
/// Whether the agent has reached its max step count for this episode.
/// </summary>
public bool maxStepReached;
/// <summary>
/// Unique identifier each agent receives at initialization. It is used
/// to separate between different agents in the environment.
/// </summary>
public int id;
/// <summary>
/// User-customizable object for sending structured output from Unity to Python in response
/// to an action in addition to a scalar reward.
/// </summary>
public CustomObservation customObservation;
/// <summary>
        /// Converts an AgentInfo to a protobuf-generated AgentInfoProto
/// </summary>
/// <returns>The protobuf version of the AgentInfo.</returns>
public AgentInfoProto ToProto()
{
var agentInfoProto = new AgentInfoProto
{
StackedVectorObservation = { stackedVectorObservation },
StoredVectorActions = { storedVectorActions },
StoredTextActions = storedTextActions,
TextObservation = textObservation,
Reward = reward,
MaxStepReached = maxStepReached,
Done = done,
Id = id,
CustomObservation = customObservation
};
if (memories != null)
{
agentInfoProto.Memories.Add(memories);
}
if (actionMasks != null)
{
agentInfoProto.ActionMask.AddRange(actionMasks);
}
foreach (var obs in visualObservations)
{
agentInfoProto.VisualObservations.Add(
ByteString.CopyFrom(obs.EncodeToPNG())
);
}
return agentInfoProto;
}
/// <summary>
/// Remove the visual observations from memory. Call at each timestep
/// to avoid memory leaks.
/// </summary>
public void ClearVisualObs()
{
foreach (var obs in visualObservations)
{
Object.Destroy(obs);
}
visualObservations.Clear();
}
}
/// <summary>
/// Struct that contains the action information sent from the Brain to the
/// Agent.
/// </summary>
public struct AgentAction
{
public float[] vectorActions;
public string textActions;
public List<float> memories;
public float value;
public CustomAction customAction;
}
/// <summary>
/// Struct that contains all the Agent-specific parameters provided in the
/// Editor. This excludes the Brain linked to the Agent since it can be
/// modified programmatically.
/// </summary>
[System.Serializable]
public class AgentParameters
{
/// <summary>
/// The list of the Camera GameObjects the agent uses for visual
/// observations.
/// </summary>
public List<Camera> agentCameras = new List<Camera>();
/// <summary>
/// The list of the RenderTextures the agent uses for visual
/// observations.
/// </summary>
public List<RenderTexture> agentRenderTextures = new List<RenderTexture>();
/// <summary>
/// The maximum number of steps the agent takes before being done.
/// </summary>
/// <remarks>
/// If set to 0, the agent can only be set to done programmatically (or
/// when the Academy is done).
/// If set to any positive integer, the agent will be set to done after
/// that many steps. Note that setting the max step to a value greater
/// than the academy max step value renders it useless.
/// </remarks>
public int maxStep;
/// <summary>
/// Determines the behaviour of the agent when done.
/// </summary>
/// <remarks>
/// If true, the agent will reset when done and start a new episode.
/// Otherwise, the agent will remain done and its behavior will be
/// dictated by the AgentOnDone method.
/// </remarks>
public bool resetOnDone = true;
/// <summary>
/// Whether to enable On Demand Decisions or make a decision at
/// every step.
/// </summary>
public bool onDemandDecision;
/// <summary>
/// Number of actions between decisions (used when On Demand Decisions
/// is turned off).
/// </summary>
public int numberOfActionsBetweenDecisions;
}
/// <summary>
/// Agent Monobehavior class that is attached to a Unity GameObject, making it
/// an Agent. An agent produces observations and takes actions in the
/// environment. Observations are determined by the cameras attached
/// to the agent in addition to the vector observations implemented by the
/// user in <see cref="CollectObservations"/>. On the other hand, actions
/// are determined by decisions produced by a linked Brain. Currently, this
/// class is expected to be extended to implement the desired agent behavior.
/// </summary>
/// <remarks>
/// Simply speaking, an agent roams through an environment and at each step
/// of the environment extracts its current observation, sends them to its
/// linked brain and in return receives an action from its brain. In practice,
/// however, an agent need not send its observation at every step since very
    /// little may have changed between successive steps. Currently, how often an
/// agent updates its brain with a fresh observation is determined by the
/// Academy.
///
/// At any step, an agent may be considered <see cref="m_Done"/>.
/// This could occur due to a variety of reasons:
/// - The agent reached an end state within its environment.
/// - The agent reached the maximum # of steps (i.e. timed out).
/// - The academy reached the maximum # of steps (forced agent to be done).
///
/// Here, an agent reaches an end state if it completes its task successfully
/// or somehow fails along the way. In the case where an agent is done before
/// the academy, it either resets and restarts, or just lingers until the
/// academy is done.
///
/// An important note regarding steps and episodes is due. Here, an agent step
/// corresponds to an academy step, which also corresponds to Unity
/// environment step (i.e. each FixedUpdate call). This is not the case for
/// episodes. The academy controls the global episode count and each agent
/// controls its own local episode count and can reset and start a new local
/// episode independently (based on its own experience). Thus an academy
    /// (global) episode can be viewed as the upper-bound on an agent's episode
/// length and that within a single global episode, an agent may have completed
/// multiple local episodes. Consequently, if an agent max step is
/// set to a value larger than the academy max steps value, then the academy
/// value takes precedence (since the agent max step will never be reached).
///
/// Lastly, note that at any step the brain linked to the agent is allowed to
/// change programmatically with <see cref="GiveBrain"/>.
///
/// Implementation-wise, it is required that this class is extended and the
/// virtual methods overridden. For sample implementations of agent behavior,
/// see the Examples/ directory within this Unity project.
/// </remarks>
[HelpURL("https://github.com/Unity-Technologies/ml-agents/blob/master/" +
"docs/Learning-Environment-Design-Agents.md")]
[System.Serializable]
public abstract class Agent : MonoBehaviour
{
/// <summary>
/// The Brain attached to this agent. A brain can be attached either
/// directly from the Editor through AgentEditor or
/// programmatically through <see cref="GiveBrain"/>. It is OK for an agent
/// to not have a brain, as long as no decision is requested.
/// </summary>
[HideInInspector] public Brain brain;
/// <summary>
/// Agent parameters specified within the Editor via AgentEditor.
/// </summary>
[HideInInspector] public AgentParameters agentParameters;
/// Current Agent information (message sent to Brain).
AgentInfo m_Info;
/// Current Agent action (message sent from Brain).
AgentAction m_Action;
/// Represents the reward the agent accumulated during the current step.
/// It is reset to 0 at the beginning of every step.
/// Should be set to a positive value when the agent performs a "good"
/// action that we wish to reinforce/reward, and set to a negative value
/// when the agent performs a "bad" action that we wish to punish/deter.
/// Additionally, the magnitude of the reward should not exceed 1.0
float m_Reward;
/// Keeps track of the cumulative reward in this episode.
float m_CumulativeReward;
/// Whether or not the agent requests an action.
bool m_RequestAction;
/// Whether or not the agent requests a decision.
bool m_RequestDecision;
/// Whether or not the agent has completed the episode. This may be due
/// to either reaching a success or fail state, or reaching the maximum
/// number of steps (i.e. timing out).
bool m_Done;
/// Whether or not the agent reached the maximum number of steps.
bool m_MaxStepReached;
/// Keeps track of the number of steps taken by the agent in this episode.
/// Note that this value is different for each agent, and may not overlap
/// with the step counter in the Academy, since agents reset based on
/// their own experience.
int m_StepCount;
/// Flag to signify that an agent has been reset but the fact that it is
/// done has not been communicated (required for On Demand Decisions).
bool m_HasAlreadyReset;
/// Flag to signify that an agent is done and should not reset until
/// the fact that it is done has been communicated.
bool m_Terminate;
/// Unique identifier each agent receives at initialization. It is used
/// to separate between different agents in the environment.
int m_Id;
/// Keeps track of the actions that are masked at each step.
private ActionMasker m_ActionMasker;
/// <summary>
/// Demonstration recorder.
/// </summary>
private DemonstrationRecorder m_Recorder;
/// Monobehavior function that is called when the attached GameObject
/// becomes enabled or active.
void OnEnable()
{
m_Id = gameObject.GetInstanceID();
var academy = FindObjectOfType<Academy>();
OnEnableHelper(academy);
m_Recorder = GetComponent<DemonstrationRecorder>();
}
/// Helper method for the <see cref="OnEnable"/> event, created to
/// facilitate testing.
void OnEnableHelper(Academy academy)
{
m_Info = new AgentInfo();
m_Action = new AgentAction();
if (academy == null)
{
throw new UnityAgentsException(
"No Academy Component could be found in the scene.");
}
academy.AgentSetStatus += SetStatus;
academy.AgentResetIfDone += ResetIfDone;
academy.AgentSendState += SendInfo;
academy.AgentAct += AgentStep;
            academy.AgentForceReset += ForceReset;
if (brain != null)
{
ResetData();
}
else
{
Debug.Log(
string.Format(
"The Agent component attached to the " +
"GameObject {0} was initialized without a brain.",
gameObject.name));
}
InitializeAgent();
}
/// Monobehavior function that is called when the attached GameObject
/// becomes disabled or inactive.
void OnDisable()
{
var academy = FindObjectOfType<Academy>();
if (academy != null)
{
academy.AgentSetStatus -= SetStatus;
academy.AgentResetIfDone -= ResetIfDone;
academy.AgentSendState -= SendInfo;
academy.AgentAct -= AgentStep;
academy.AgentForceReset -= ForceReset;
}
}
/// <summary>
/// Updates the Brain for the agent. Any brain currently assigned to the
/// agent will be replaced with the provided one.
/// </summary>
/// <remarks>
/// The agent unsubscribes from its current brain (if it has one) and
/// subscribes to the provided brain. This enables contextual brains, that
/// is, updating the behaviour (hence brain) of the agent depending on
/// the context of the game. For example, we may utilize one (wandering)
/// brain when an agent is randomly exploring an open world, but switch
/// to another (fighting) brain when it comes into contact with an enemy.
/// </remarks>
/// <param name="givenBrain">New brain to subscribe this agent to</param>
public void GiveBrain(Brain givenBrain)
{
brain = givenBrain;
ResetData();
}
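        // For example, a game could swap behaviours at runtime depending on context
        // (the brain assets and condition below are hypothetical):
        //   agent.GiveBrain(enemyVisible ? fightingBrain : wanderingBrain);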
/// <summary>
        /// Returns the current step counter (within the current episode).
        /// </summary>
        /// <returns>
        /// Current step count.
/// </returns>
public int GetStepCount()
{
return m_StepCount;
}
/// <summary>
/// Resets the step reward and possibly the episode reward for the agent.
/// </summary>
public void ResetReward()
{
m_Reward = 0f;
if (m_Done)
{
m_CumulativeReward = 0f;
}
}
/// <summary>
/// Overrides the current step reward of the agent and updates the episode
/// reward accordingly.
/// </summary>
/// <param name="reward">The new value of the reward.</param>
public void SetReward(float reward)
{
m_CumulativeReward += (reward - m_Reward);
m_Reward = reward;
}
/// <summary>
/// Increments the step and episode rewards by the provided value.
/// </summary>
/// <param name="increment">Incremental reward value.</param>
public void AddReward(float increment)
{
m_Reward += increment;
m_CumulativeReward += increment;
}
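        // A subclassed Agent typically shapes rewards from within AgentAction(), e.g.
        // (the 'reachedGoal' condition below is hypothetical):
        //   AddReward(-0.001f);   // small per-step penalty to encourage fast solutions
        //   if (reachedGoal)
        //   {
        //       SetReward(1.0f);  // overrides the step reward with the success reward
        //       Done();
        //   }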
/// <summary>
/// Retrieves the step reward for the Agent.
/// </summary>
/// <returns>The step reward.</returns>
public float GetReward()
{
return m_Reward;
}
/// <summary>
/// Retrieves the episode reward for the Agent.
/// </summary>
/// <returns>The episode reward.</returns>
public float GetCumulativeReward()
{
return m_CumulativeReward;
}
/// <summary>
/// Sets the done flag to true.
/// </summary>
public void Done()
{
m_Done = true;
}
/// <summary>
/// Is called when the agent must request the brain for a new decision.
/// </summary>
public void RequestDecision()
{
m_RequestDecision = true;
RequestAction();
}
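        // With On Demand Decisions enabled, a subclassed Agent calls this from its own
        // game logic rather than on a fixed schedule, e.g. in a turn-based game
        // (the 'isMyTurn' condition is hypothetical):
        //   if (isMyTurn) { RequestDecision(); }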
/// <summary>
        /// Is called when the agent must perform a new action.
/// </summary>
public void RequestAction()
{
m_RequestAction = true;
}
/// <summary>
        /// Indicates if the agent has reached its maximum number of steps.
        /// </summary>
        /// <returns>
        /// <c>true</c>, if the max step was reached, <c>false</c> otherwise.
/// </returns>
public bool IsMaxStepReached()
{
return m_MaxStepReached;
}
/// <summary>
/// Indicates if the agent is done
/// </summary>
/// <returns>
/// <c>true</c>, if the agent is done, <c>false</c> otherwise.
/// </returns>
public bool IsDone()
{
return m_Done;
}
/// Helper function that resets all the data structures associated with
/// the agent. Typically used when the agent is being initialized or reset
/// at the end of an episode.
void ResetData()
{
if (brain == null)
{
return;
}
var param = brain.brainParameters;
m_ActionMasker = new ActionMasker(param);
// If we haven't initialized vectorActions, initialize to 0. This should only
// happen during the creation of the Agent. In subsequent episodes, vectorAction
// should stay the previous action before the Done(), so that it is properly recorded.
if (m_Action.vectorActions == null)
{
if (param.vectorActionSpaceType == SpaceType.Continuous)
{
m_Action.vectorActions = new float[param.vectorActionSize[0]];
m_Info.storedVectorActions = new float[param.vectorActionSize[0]];
}
else
{
m_Action.vectorActions = new float[param.vectorActionSize.Length];
m_Info.storedVectorActions = new float[param.vectorActionSize.Length];
}
}
if (m_Info.textObservation == null)
m_Info.textObservation = "";
m_Action.textActions = "";
m_Info.memories = new List<float>();
m_Action.memories = new List<float>();
m_Info.vectorObservation =
new List<float>(param.vectorObservationSize);
m_Info.stackedVectorObservation =
new List<float>(param.vectorObservationSize
* brain.brainParameters.numStackedVectorObservations);
m_Info.stackedVectorObservation.AddRange(
new float[param.vectorObservationSize
* param.numStackedVectorObservations]);
m_Info.visualObservations = new List<Texture2D>();
m_Info.customObservation = null;
}
/// <summary>
/// Initializes the agent, called once when the agent is enabled. Can be
/// left empty if there is no special, unique set-up behavior for the
/// agent.
/// </summary>
/// <remarks>
/// One sample use is to store local references to other objects in the
/// scene which would facilitate computing this agents observation.
/// </remarks>
public virtual void InitializeAgent()
{
}
/// <summary>
/// Sends the Agent info to the linked Brain.
/// </summary>
void SendInfoToBrain()
{
if (brain == null)
{
return;
}
m_Info.memories = m_Action.memories;
m_Info.storedVectorActions = m_Action.vectorActions;
m_Info.storedTextActions = m_Action.textActions;
m_Info.vectorObservation.Clear();
m_ActionMasker.ResetMask();
CollectObservations();
m_Info.actionMasks = m_ActionMasker.GetMask();
var param = brain.brainParameters;
if (m_Info.vectorObservation.Count != param.vectorObservationSize)
{
throw new UnityAgentsException(string.Format(
"Vector Observation size mismatch between continuous " +
"agent {0} and brain {1}. " +
"Was Expecting {2} but received {3}. ",
gameObject.name, brain.name,
brain.brainParameters.vectorObservationSize,
m_Info.vectorObservation.Count));
}
Utilities.ShiftLeft(m_Info.stackedVectorObservation, param.vectorObservationSize);
Utilities.ReplaceRange(m_Info.stackedVectorObservation, m_Info.vectorObservation,
m_Info.stackedVectorObservation.Count - m_Info.vectorObservation.Count);
m_Info.visualObservations.Clear();
var visualObservationCount = agentParameters.agentCameras.Count + agentParameters.agentRenderTextures.Count;
if (param.cameraResolutions.Length > visualObservationCount)
{
throw new UnityAgentsException(string.Format(
"Not enough cameras/renderTextures for agent {0} : Brain {1} expecting at " +
"least {2} cameras/renderTextures but only {3} were present.",
gameObject.name, brain.name,
brain.brainParameters.cameraResolutions.Length,
visualObservationCount));
}
//First add all cameras
for (var i = 0; i < agentParameters.agentCameras.Count; i++)
{
var obsTexture = ObservationToTexture(
agentParameters.agentCameras[i],
param.cameraResolutions[i].width,
param.cameraResolutions[i].height);
m_Info.visualObservations.Add(obsTexture);
}
//Then add all renderTextures
var camCount = agentParameters.agentCameras.Count;
for (var i = 0; i < agentParameters.agentRenderTextures.Count; i++)
{
var obsTexture = ObservationToTexture(
agentParameters.agentRenderTextures[i],
param.cameraResolutions[camCount + i].width,
param.cameraResolutions[camCount + i].height);
m_Info.visualObservations.Add(obsTexture);
}
m_Info.reward = m_Reward;
m_Info.done = m_Done;
m_Info.maxStepReached = m_MaxStepReached;
m_Info.id = m_Id;
brain.SendState(this, m_Info);
if (m_Recorder != null && m_Recorder.record && Application.isEditor)
{
m_Recorder.WriteExperience(m_Info);
}
m_Info.textObservation = "";
}
/// <summary>
/// Collects the (vector, visual, text) observations of the agent.
/// The agent observation describes the current environment from the
/// perspective of the agent.
/// </summary>
/// <remarks>
        /// Simply, an agent's observation is any environment information that helps
        /// the Agent achieve its goal. For example, for a fighting Agent, its
/// observation could include distances to friends or enemies, or the
/// current level of ammunition at its disposal.
/// Recall that an Agent may attach vector, visual or textual observations.
/// Vector observations are added by calling the provided helper methods:
/// - <see cref="AddVectorObs(int)"/>
/// - <see cref="AddVectorObs(float)"/>
/// - <see cref="AddVectorObs(Vector3)"/>
/// - <see cref="AddVectorObs(Vector2)"/>
/// - <see>
/// <cref>AddVectorObs(float[])</cref>
/// </see>
/// - <see>
/// <cref>AddVectorObs(List{float})</cref>
/// </see>
/// - <see cref="AddVectorObs(Quaternion)"/>
/// - <see cref="AddVectorObs(bool)"/>
/// - <see cref="AddVectorObs(int, int)"/>
/// Depending on your environment, any combination of these helpers can
/// be used. They just need to be used in the exact same order each time
/// this method is called and the resulting size of the vector observation
/// needs to match the vectorObservationSize attribute of the linked Brain.
/// Visual observations are implicitly added from the cameras attached to
/// the Agent.
/// Lastly, textual observations are added using
/// <see cref="SetTextObs(string)"/>.
/// </remarks>
public virtual void CollectObservations()
{
}
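        // A minimal sketch of an override, assuming a hypothetical 'target' Transform
        // field and a Rigidbody on the agent. The calls below add 3 + 3 + 1 = 7 floats,
        // so the linked Brain's Vector Observation size would need to be 7:
        //
        //   public override void CollectObservations()
        //   {
        //       AddVectorObs(transform.position);                    // 3 floats
        //       AddVectorObs(target.position);                       // 3 floats
        //       AddVectorObs(GetComponent<Rigidbody>().velocity.x);  // 1 float
        //   }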
/// <summary>
/// Sets an action mask for discrete control agents. When used, the agent will not be
/// able to perform the action passed as argument at the next decision. If no branch is
/// specified, the default branch will be 0. The actionIndex or actionIndices correspond
/// to the action the agent will be unable to perform.
/// </summary>
/// <param name="actionIndices">The indices of the masked actions on branch 0</param>
protected void SetActionMask(IEnumerable<int> actionIndices)
{
m_ActionMasker.SetActionMask(0, actionIndices);
}
/// <summary>
/// Sets an action mask for discrete control agents. When used, the agent will not be
/// able to perform the action passed as argument at the next decision. If no branch is
/// specified, the default branch will be 0. The actionIndex or actionIndices correspond
/// to the action the agent will be unable to perform.
/// </summary>
/// <param name="actionIndex">The index of the masked action on branch 0</param>
protected void SetActionMask(int actionIndex)
{
m_ActionMasker.SetActionMask(0, new[] { actionIndex });
}
/// <summary>
/// Sets an action mask for discrete control agents. When used, the agent will not be
/// able to perform the action passed as argument at the next decision. If no branch is
/// specified, the default branch will be 0. The actionIndex or actionIndices correspond
/// to the action the agent will be unable to perform.
/// </summary>
/// <param name="branch">The branch for which the actions will be masked</param>
/// <param name="actionIndex">The index of the masked action</param>
protected void SetActionMask(int branch, int actionIndex)
{
m_ActionMasker.SetActionMask(branch, new[] { actionIndex });
}
/// <summary>
/// Modifies an action mask for discrete control agents. When used, the agent will not be
/// able to perform the action passed as argument at the next decision. If no branch is
/// specified, the default branch will be 0. The actionIndex or actionIndices correspond
/// to the action the agent will be unable to perform.
/// </summary>
/// <param name="branch">The branch for which the actions will be masked</param>
/// <param name="actionIndices">The indices of the masked actions</param>
protected void SetActionMask(int branch, IEnumerable<int> actionIndices)
{
m_ActionMasker.SetActionMask(branch, actionIndices);
}
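        // For example, an agent standing against a wall might forbid its "move forward"
        // action for the next decision (branch and index values are illustrative):
        //   SetActionMask(1);              // masks action 1 on the default branch 0
        //   SetActionMask(0, new[] { 1 }); // equivalent, with the branch given explicitly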
/// <summary>
/// Adds a float observation to the vector observations of the agent.
        /// Increases the size of the agent's vector observation by 1.
/// </summary>
/// <param name="observation">Observation.</param>
protected void AddVectorObs(float observation)
{
m_Info.vectorObservation.Add(observation);
}
/// <summary>
/// Adds an integer observation to the vector observations of the agent.
        /// Increases the size of the agent's vector observation by 1.
/// </summary>
/// <param name="observation">Observation.</param>
protected void AddVectorObs(int observation)
{
m_Info.vectorObservation.Add(observation);
}
/// <summary>
        /// Adds a Vector3 observation to the vector observations of the agent.
        /// Increases the size of the agent's vector observation by 3.
/// </summary>
/// <param name="observation">Observation.</param>
protected void AddVectorObs(Vector3 observation)
{
m_Info.vectorObservation.Add(observation.x);
m_Info.vectorObservation.Add(observation.y);
m_Info.vectorObservation.Add(observation.z);
}
/// <summary>
        /// Adds a Vector2 observation to the vector observations of the agent.
        /// Increases the size of the agent's vector observation by 2.
/// </summary>
/// <param name="observation">Observation.</param>
protected void AddVectorObs(Vector2 observation)
{
m_Info.vectorObservation.Add(observation.x);
m_Info.vectorObservation.Add(observation.y);
}
/// <summary>
/// Adds a collection of float observations to the vector observations of the agent.
        /// Increases the size of the agent's vector observation by the size of the collection.
/// </summary>
/// <param name="observation">Observation.</param>
protected void AddVectorObs(IEnumerable<float> observation)
{
m_Info.vectorObservation.AddRange(observation);
}
/// <summary>
/// Adds a quaternion observation to the vector observations of the agent.
        /// Increases the size of the agent's vector observation by 4.
/// </summary>
/// <param name="observation">Observation.</param>
protected void AddVectorObs(Quaternion observation)
{
m_Info.vectorObservation.Add(observation.x);
m_Info.vectorObservation.Add(observation.y);
m_Info.vectorObservation.Add(observation.z);
m_Info.vectorObservation.Add(observation.w);
}
/// <summary>
/// Adds a boolean observation to the vector observation of the agent.
/// Increases the size of the agent's vector observation by 1.
/// </summary>
/// <param name="observation"></param>
protected void AddVectorObs(bool observation)
{
m_Info.vectorObservation.Add(observation ? 1f : 0f);
}
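        /// <summary>
        /// Adds a one-hot encoding of an integer observation to the vector
        /// observations of the agent.
        /// Increases the size of the agent's vector observation by <paramref name="range"/>.
        /// </summary>
        /// <param name="observation">Observation (expected to be in [0, range)).</param>
        /// <param name="range">The number of possible values the observation can take.</param>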
protected void AddVectorObs(int observation, int range)
{
var oneHotVector = new float[range];
oneHotVector[observation] = 1;
m_Info.vectorObservation.AddRange(oneHotVector);
}
/// <summary>
/// Sets the text observation.
/// </summary>
/// <param name="textObservation">The text observation.</param>
public void SetTextObs(string textObservation)
{
m_Info.textObservation = textObservation;
}
/// <summary>
/// Specifies the agent behavior at every step based on the provided
/// action.
/// </summary>
/// <param name="vectorAction">
        /// Vector action. Note that for discrete action spaces, the provided
        /// array contains one value per action branch.
/// </param>
/// <param name="textAction">Text action.</param>
public virtual void AgentAction(float[] vectorAction, string textAction)
{
}
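        // A minimal sketch of an override for a two-dimensional continuous action space,
        // assuming the agent has a Rigidbody and a hypothetical 'moveSpeed' field:
        //
        //   public override void AgentAction(float[] vectorAction, string textAction)
        //   {
        //       var move = new Vector3(vectorAction[0], 0f, vectorAction[1]);
        //       GetComponent<Rigidbody>().AddForce(move * moveSpeed);
        //       AddReward(-0.001f);  // small time penalty
        //   }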
/// <summary>
/// Specifies the agent behavior at every step based on the provided
/// action.
/// </summary>
/// <param name="vectorAction">
        /// Vector action. Note that for discrete action spaces, the provided
        /// array contains one value per action branch.
/// </param>
/// <param name="textAction">Text action.</param>
/// <param name="customAction">
/// A custom action, defined by the user as custom protobuf message. Useful if the action is hard to encode
/// as either a flat vector or a single string.
/// </param>
public virtual void AgentAction(float[] vectorAction, string textAction, CustomAction customAction)
{
// We fall back to not using the custom action if the subclassed Agent doesn't override this method.
AgentAction(vectorAction, textAction);
}
/// <summary>
/// Specifies the agent behavior when done and
/// <see cref="AgentParameters.resetOnDone"/> is false. This method can be
/// used to remove the agent from the scene.
/// </summary>
public virtual void AgentOnDone()
{
}
/// <summary>
/// Specifies the agent behavior when being reset, which can be due to
/// the agent or Academy being done (i.e. completion of local or global
/// episode).
/// </summary>
public virtual void AgentReset()
{
}
/// <summary>
/// This method will forcefully reset the agent and will also reset the hasAlreadyReset flag.
        /// This way, even if the agent was already in the process of resetting, it will be reset again
/// and will not send a Done flag at the next step.
/// </summary>
void ForceReset()
{
m_HasAlreadyReset = false;
_AgentReset();
}
/// <summary>
/// An internal reset method that updates internal data structures in
/// addition to calling <see cref="AgentReset"/>.
/// </summary>
void _AgentReset()
{
ResetData();
m_StepCount = 0;
AgentReset();
}
/// <summary>
/// Updates the vector action.
/// </summary>
/// <param name="vectorActions">Vector actions.</param>
public void UpdateVectorAction(float[] vectorActions)
{
m_Action.vectorActions = vectorActions;
}
/// <summary>
/// Updates the memories action.
/// </summary>
/// <param name="memories">Memories.</param>
public void UpdateMemoriesAction(List<float> memories)
{
m_Action.memories = memories;
}
public void AppendMemoriesAction(List<float> memories)
{
m_Action.memories.AddRange(memories);
}
public List<float> GetMemoriesAction()
{
return m_Action.memories;
}
/// <summary>
/// Updates the text action.
/// </summary>
/// <param name="textActions">Text actions.</param>
public void UpdateTextAction(string textActions)
{
m_Action.textActions = textActions;
}
/// <summary>
/// Updates the custom action.
/// </summary>
/// <param name="customAction">Custom action.</param>
public void UpdateCustomAction(CustomAction customAction)
{
m_Action.customAction = customAction;
}
/// <summary>
/// Updates the value of the agent.
/// </summary>
public void UpdateValueAction(float value)
{
m_Action.value = value;
}
protected float GetValueEstimate()
{
return m_Action.value;
}
/// <summary>
/// Scales continuous action from [-1, 1] to arbitrary range.
/// </summary>
/// <param name="rawAction"></param>
/// <param name="min"></param>
/// <param name="max"></param>
/// <returns></returns>
protected float ScaleAction(float rawAction, float min, float max)
{
var middle = (min + max) / 2;
var range = (max - min) / 2;
return rawAction * range + middle;
}
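        // For example, mapping a raw continuous action in [-1, 1] onto a torque range
        // of [0, 150] (the range is illustrative):
        //   var torque = ScaleAction(vectorAction[0], 0f, 150f);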
/// <summary>
/// Sets the status of the agent. Will request decisions or actions according
        /// to the Academy's step count.
/// </summary>
/// <param name="academyStepCounter">Number of current steps in episode</param>
void SetStatus(int academyStepCounter)
{
MakeRequests(academyStepCounter);
}
/// Signals the agent that it must reset if its done flag is set to true.
void ResetIfDone()
{
// If an agent is done, then it will also
// request for a decision and an action
if (IsDone())
{
if (agentParameters.resetOnDone)
{
if (agentParameters.onDemandDecision)
{
if (!m_HasAlreadyReset)
{
// If event based, the agent can reset as soon
// as it is done
_AgentReset();
m_HasAlreadyReset = true;
}
}
else if (m_RequestDecision)
{
// If not event based, the agent must wait to request a
// decision before resetting to keep multiple agents in sync.
_AgentReset();
}
}
else
{
m_Terminate = true;
RequestDecision();
}
}
}
/// <summary>
        /// Signals the agent that it must send its info to the brain.
/// </summary>
void SendInfo()
{
if (m_RequestDecision)
{
SendInfoToBrain();
ResetReward();
m_Done = false;
m_MaxStepReached = false;
m_RequestDecision = false;
m_HasAlreadyReset = false;
}
}
/// Used by the brain to make the agent perform a step.
void AgentStep()
{
if (m_Terminate)
{
m_Terminate = false;
ResetReward();
m_Done = false;
m_MaxStepReached = false;
m_RequestDecision = false;
m_RequestAction = false;
m_HasAlreadyReset = false;
OnDisable();
AgentOnDone();
}
if ((m_RequestAction) && (brain != null))
{
m_RequestAction = false;
AgentAction(m_Action.vectorActions, m_Action.textActions, m_Action.customAction);
}
if ((m_StepCount >= agentParameters.maxStep)
&& (agentParameters.maxStep > 0))
{
m_MaxStepReached = true;
Done();
}
m_StepCount += 1;
}
/// <summary>
/// Is called after every step, contains the logic to decide if the agent
/// will request a decision at the next step.
/// </summary>
void MakeRequests(int academyStepCounter)
{
agentParameters.numberOfActionsBetweenDecisions =
Mathf.Max(agentParameters.numberOfActionsBetweenDecisions, 1);
if (!agentParameters.onDemandDecision)
{
RequestAction();
if (academyStepCounter %
agentParameters.numberOfActionsBetweenDecisions == 0)
{
RequestDecision();
}
}
}
/// <summary>
/// Converts a camera and corresponding resolution to a 2D texture.
/// </summary>
/// <returns>The 2D texture.</returns>
/// <param name="obsCamera">Camera.</param>
/// <param name="width">Width of resulting 2D texture.</param>
/// <param name="height">Height of resulting 2D texture.</param>
/// <returns name="texture2D">Texture2D to render to.</returns>
public static Texture2D ObservationToTexture(Camera obsCamera, int width, int height)
{
var texture2D = new Texture2D(width, height, TextureFormat.RGB24, false);
var oldRec = obsCamera.rect;
obsCamera.rect = new Rect(0f, 0f, 1f, 1f);
var depth = 24;
var format = RenderTextureFormat.Default;
var readWrite = RenderTextureReadWrite.Default;
var tempRt =
RenderTexture.GetTemporary(width, height, depth, format, readWrite);
var prevActiveRt = RenderTexture.active;
var prevCameraRt = obsCamera.targetTexture;
// render to offscreen texture (readonly from CPU side)
RenderTexture.active = tempRt;
obsCamera.targetTexture = tempRt;
obsCamera.Render();
texture2D.ReadPixels(new Rect(0, 0, texture2D.width, texture2D.height), 0, 0);
obsCamera.targetTexture = prevCameraRt;
obsCamera.rect = oldRec;
RenderTexture.active = prevActiveRt;
RenderTexture.ReleaseTemporary(tempRt);
return texture2D;
}
/// <summary>
        /// Converts a RenderTexture and corresponding resolution to a 2D texture.
/// </summary>
/// <returns>The 2D texture.</returns>
/// <param name="obsTexture">RenderTexture.</param>
/// <param name="width">Width of resulting 2D texture.</param>
/// <param name="height">Height of resulting 2D texture.</param>
/// <returns name="texture2D">Texture2D to render to.</returns>
public static Texture2D ObservationToTexture(RenderTexture obsTexture, int width, int height)
{
var texture2D = new Texture2D(width, height, TextureFormat.RGB24, false);
if (width != texture2D.width || height != texture2D.height)
{
texture2D.Resize(width, height);
}
if (width != obsTexture.width || height != obsTexture.height)
{
throw new UnityAgentsException(string.Format(
"RenderTexture {0} : width/height is {1}/{2} brain is expecting {3}/{4}.",
obsTexture.name, obsTexture.width, obsTexture.height, width, height));
}
var prevActiveRt = RenderTexture.active;
RenderTexture.active = obsTexture;
texture2D.ReadPixels(new Rect(0, 0, texture2D.width, texture2D.height), 0, 0);
texture2D.Apply();
RenderTexture.active = prevActiveRt;
return texture2D;
}
/// <summary>
/// Sets the custom observation for the agent for this episode.
/// </summary>
/// <param name="customObservation">New value of the agent's custom observation.</param>
public void SetCustomObservation(CustomObservation customObservation)
{
m_Info.customObservation = customObservation;
}
}
}
using System.Collections.Generic;
using System.Linq;
using System;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// The batcher is an RL specific class that makes sure that the information each object in
/// Unity (Academy and Brains) wants to send to External is appropriately batched together
/// and sent only when necessary.
///
/// The Batcher will only send a Message to the Communicator when either :
/// 1 - The academy is done
/// 2 - At least one brain has data to send
///
/// At each step, the batcher will keep track of the brains that queried the batcher for that
/// step. The batcher can only send the batched data when all the Brains have queried the
/// Batcher.
/// </summary>
public class Batcher
{
/// The default number of agents in the scene
private const int k_NumAgents = 32;
/// Keeps track of which brains have data to send on the current step
Dictionary<string, bool> m_HasData =
new Dictionary<string, bool>();
/// Keeps track of which brains queried the batcher on the current step
Dictionary<string, bool> m_HasQueried =
new Dictionary<string, bool>();
/// Keeps track of the agents of each brain on the current step
Dictionary<string, List<Agent>> m_CurrentAgents =
new Dictionary<string, List<Agent>>();
/// The Communicator of the batcher, sends a message at most once per step
ICommunicator m_Communicator;
/// The current UnityRLOutput to be sent when all the brains queried the batcher
CommunicatorObjects.UnityRLOutput m_CurrentUnityRlOutput =
new CommunicatorObjects.UnityRLOutput();
/// Keeps track of last CommandProto sent by External
CommunicatorObjects.CommandProto m_Command;
/// Keeps track of last EnvironmentParametersProto sent by External
CommunicatorObjects.EnvironmentParametersProto m_EnvironmentParameters;
/// Keeps track of last training mode sent by External
bool m_IsTraining;
/// Keeps track of the number of messages received
private ulong m_MessagesReceived;
/// <summary>
/// Initializes a new instance of the Batcher class.
/// </summary>
/// <param name="communicator">The communicator to be used by the batcher.</param>
public Batcher(ICommunicator communicator)
{
m_Communicator = communicator;
}
/// <summary>
/// Sends the academy parameters through the Communicator.
/// Is used by the academy to send the AcademyParameters to the communicator.
/// </summary>
/// <returns>The External Initialization Parameters received.</returns>
/// <param name="academyParameters">The Unity Initialization Parameters to be sent.</param>
public CommunicatorObjects.UnityRLInitializationInput SendAcademyParameters(
CommunicatorObjects.UnityRLInitializationOutput academyParameters)
{
CommunicatorObjects.UnityInput input;
var initializationInput = new CommunicatorObjects.UnityInput();
try
{
initializationInput = m_Communicator.Initialize(
new CommunicatorObjects.UnityOutput
{
RlInitializationOutput = academyParameters
},
out input);
}
catch
{
var exceptionMessage = "The Communicator was unable to connect. Please make sure the External " +
"process is ready to accept communication with Unity.";
// Check for common error condition and add details to the exception message.
var httpProxy = Environment.GetEnvironmentVariable("HTTP_PROXY");
var httpsProxy = Environment.GetEnvironmentVariable("HTTPS_PROXY");
if (httpProxy != null || httpsProxy != null)
{
                exceptionMessage += " Try removing HTTP_PROXY and HTTPS_PROXY from the " +
"environment variables and try again.";
}
throw new UnityAgentsException(exceptionMessage);
}
var firstRlInput = input.RlInput;
m_Command = firstRlInput.Command;
m_EnvironmentParameters = firstRlInput.EnvironmentParameters;
m_IsTraining = firstRlInput.IsTraining;
return initializationInput.RlInitializationInput;
}
/// <summary>
/// Gets the command. Is used by the academy to get reset or quit signals.
/// </summary>
/// <returns>The current command.</returns>
public CommunicatorObjects.CommandProto GetCommand()
{
return m_Command;
}
/// <summary>
/// Gets the number of messages received so far. Can be used to check for new messages.
/// </summary>
/// <returns>The number of messages received since start of the simulation</returns>
public ulong GetNumberMessageReceived()
{
return m_MessagesReceived;
}
/// <summary>
/// Gets the environment parameters. Is used by the academy to update
/// the environment parameters.
/// </summary>
/// <returns>The environment parameters.</returns>
public CommunicatorObjects.EnvironmentParametersProto GetEnvironmentParameters()
{
return m_EnvironmentParameters;
}
/// <summary>
/// Gets the last training_mode flag External sent
/// </summary>
/// <returns><c>true</c>, if training mode is requested, <c>false</c> otherwise.</returns>
public bool GetIsTraining()
{
return m_IsTraining;
}
/// <summary>
/// Adds the brain to the list of brains which will be sending information to External.
/// </summary>
/// <param name="brainKey">Brain key.</param>
public void SubscribeBrain(string brainKey)
{
m_HasQueried[brainKey] = false;
m_HasData[brainKey] = false;
m_CurrentAgents[brainKey] = new List<Agent>(k_NumAgents);
m_CurrentUnityRlOutput.AgentInfos.Add(
brainKey,
new CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto());
}
/// <summary>
/// Sends the brain info. If at least one brain has an agent in need of
/// a decision or if the academy is done, the data is sent via
        /// Communicator. Otherwise, no message is sent for this step. The data can only be
/// sent once all the brains that subscribed to the batcher have tried
/// to send information.
/// </summary>
/// <param name="brainKey">Brain key.</param>
/// <param name="agentInfo">Agent info.</param>
public void SendBrainInfo(
string brainKey, Dictionary<Agent, AgentInfo> agentInfo)
{
// If no communicator is initialized, the Batcher will not transmit
// BrainInfo
if (m_Communicator == null)
{
return;
}
            // The brain called SendBrainInfo; update m_HasQueried
m_HasQueried[brainKey] = true;
// Populate the currentAgents dictionary
m_CurrentAgents[brainKey].Clear();
foreach (var agent in agentInfo.Keys)
{
m_CurrentAgents[brainKey].Add(agent);
}
// If at least one agent has data to send, then append data to
            // the message and update m_HasData
if (m_CurrentAgents[brainKey].Count > 0)
{
foreach (var agent in m_CurrentAgents[brainKey])
{
var agentInfoProto = agentInfo[agent].ToProto();
m_CurrentUnityRlOutput.AgentInfos[brainKey].Value.Add(agentInfoProto);
// Avoid visual obs memory leak. This should be called AFTER we are done with the visual obs.
// e.g. after recording them to demo and using them for inference.
agentInfo[agent].ClearVisualObs();
}
m_HasData[brainKey] = true;
}
            // Once every subscribed brain has queried the batcher, send the whole
            // message if at least one of them has data
if (m_HasQueried.Values.All(x => x))
{
if (m_HasData.Values.Any(x => x))
{
SendBatchedMessageHelper();
}
// The message was just sent, so we must reset m_HasData and
// m_HasQueried
foreach (var k in m_CurrentAgents.Keys)
{
m_HasData[k] = false;
m_HasQueried[k] = false;
}
}
}
/// <summary>
/// Helper method that sends the current UnityRLOutput, receives the next UnityInput and
/// Applies the appropriate AgentAction to the agents.
/// </summary>
void SendBatchedMessageHelper()
{
var input = m_Communicator.Exchange(
new CommunicatorObjects.UnityOutput
{
RlOutput = m_CurrentUnityRlOutput
});
m_MessagesReceived += 1;
foreach (var k in m_CurrentUnityRlOutput.AgentInfos.Keys)
{
m_CurrentUnityRlOutput.AgentInfos[k].Value.Clear();
}
if (input == null)
{
m_Command = CommunicatorObjects.CommandProto.Quit;
return;
}
var rlInput = input.RlInput;
if (rlInput == null)
{
m_Command = CommunicatorObjects.CommandProto.Quit;
return;
}
m_Command = rlInput.Command;
m_EnvironmentParameters = rlInput.EnvironmentParameters;
m_IsTraining = rlInput.IsTraining;
if (rlInput.AgentActions == null)
{
return;
}
foreach (var brainName in rlInput.AgentActions.Keys)
{
if (!m_CurrentAgents[brainName].Any())
{
continue;
}
if (!rlInput.AgentActions[brainName].Value.Any())
{
continue;
}
for (var i = 0; i < m_CurrentAgents[brainName].Count; i++)
{
var agent = m_CurrentAgents[brainName][i];
var action = rlInput.AgentActions[brainName].Value[i];
agent.UpdateVectorAction(action.VectorActions.ToArray());
agent.UpdateMemoriesAction(action.Memories.ToList());
agent.UpdateTextAction(action.TextActions);
agent.UpdateValueAction(action.Value);
agent.UpdateCustomAction(action.CustomAction);
}
}
}
}
}
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Behavioral Cloning Helper script. Attach to teacher agent to enable
/// resetting the experience buffer, as well as toggling session recording.
/// </summary>
public class BcTeacherHelper : MonoBehaviour
{
bool m_RecordExperiences;
bool m_ResetBuffer;
Agent m_MyAgent;
float m_BufferResetTime;
public KeyCode recordKey = KeyCode.R;
public KeyCode resetKey = KeyCode.C;
// Use this for initialization
void Start()
{
m_RecordExperiences = true;
m_ResetBuffer = false;
m_MyAgent = GetComponent<Agent>();
m_BufferResetTime = Time.time;
}
// Update is called once per frame
void Update()
{
if (Input.GetKeyDown(recordKey))
{
m_RecordExperiences = !m_RecordExperiences;
}
if (Input.GetKeyDown(resetKey))
{
m_ResetBuffer = true;
m_BufferResetTime = Time.time;
}
else
{
m_ResetBuffer = false;
}
Monitor.Log("Recording experiences " + recordKey, m_RecordExperiences.ToString());
var timeSinceBufferReset = Time.time - m_BufferResetTime;
Monitor.Log("Seconds since buffer reset " + resetKey,
Mathf.FloorToInt(timeSinceBufferReset).ToString());
}
void FixedUpdate()
{
// Convert both bools into a single comma-separated string. Python assumes
// that this structure is preserved.
m_MyAgent.SetTextObs(m_RecordExperiences + "," + m_ResetBuffer);
}
}
}
using System.Collections.Generic;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Brains receive data from Agents through calls to SendState. The Brain then updates the
/// actions of the agents at each FixedUpdate.
/// The Brain encapsulates the decision-making process. Every Agent must be assigned a Brain,
/// but you can use the same Brain with more than one Agent. You can also create several
/// Brains and attach each Brain to one or more Agents.
/// Brain assets have several important properties that you can set using the Inspector window.
/// These properties must be appropriate for the Agents using the Brain. For example, the
/// Vector Observation Space Size property must match the length of the feature
/// vector created by an Agent exactly.
/// </summary>
public abstract class Brain : ScriptableObject
{
[SerializeField] public BrainParameters brainParameters;
protected Dictionary<Agent, AgentInfo> m_AgentInfos =
new Dictionary<Agent, AgentInfo>(1024);
protected Batcher m_BrainBatcher;
[System.NonSerialized]
private bool m_IsInitialized;
/// <summary>
/// Sets the Batcher of the Brain. The brain will call the batcher at every step and give
/// it the agent's data using SendBrainInfo at each DecideAction call.
/// </summary>
/// <param name="batcher"> The Batcher the brain will use for the current session</param>
public void SetBatcher(Batcher batcher)
{
if (batcher == null)
{
m_BrainBatcher = null;
}
else
{
m_BrainBatcher = batcher;
m_BrainBatcher.SubscribeBrain(name);
}
LazyInitialize();
}
/// <summary>
/// Adds the data of an agent to the current batch so it will be processed in DecideAction.
/// </summary>
/// <param name="agent"></param>
/// <param name="info"></param>
public void SendState(Agent agent, AgentInfo info)
{
LazyInitialize();
m_AgentInfos.Add(agent, info);
}
/// <summary>
/// If the Brain is not initialized, it subscribes to the Academy's DecideAction Event and
/// calls the Initialize method to be implemented by child classes.
/// </summary>
private void LazyInitialize()
{
if (!m_IsInitialized)
{
var academy = FindObjectOfType<Academy>();
if (academy)
{
academy.BrainDecideAction += BrainDecideAction;
academy.DestroyAction += Shutdown;
Initialize();
m_IsInitialized = true;
}
}
}
/// <summary>
/// Called by the Academy when it shuts down. This ensures that the Brain cleans up properly
/// after scene changes.
/// </summary>
private void Shutdown()
{
if (m_IsInitialized)
{
m_AgentInfos.Clear();
m_IsInitialized = false;
}
}
/// <summary>
/// Calls the DecideAction method that the concrete brain implements.
/// </summary>
private void BrainDecideAction()
{
m_BrainBatcher?.SendBrainInfo(name, m_AgentInfos);
DecideAction();
}
/// <summary>
/// Is called only once at the beginning of the training or inference session.
/// </summary>
protected abstract void Initialize();
/// <summary>
/// Is called once per Environment Step after the Brain has been initialized.
/// </summary>
protected abstract void DecideAction();
}
}
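// The sketch below is not part of the SDK; it shows a hypothetical minimal Brain subclass
// named ConstantActionBrain, assuming a continuous action space. It illustrates the contract
// described above: Initialize runs once, DecideAction runs once per Academy step, and
// m_AgentInfos must be cleared once the queued agents have received their actions.
using UnityEngine;
namespace MLAgents.Examples
{
[CreateAssetMenu(fileName = "ConstantActionBrain", menuName = "ML-Agents/Examples/Constant Action Brain")]
public class ConstantActionBrain : Brain
{
protected override void Initialize()
{
// No one-time setup is needed for this trivial example.
}
protected override void DecideAction()
{
foreach (var agent in m_AgentInfos.Keys)
{
// Send an all-zero action vector of the size declared in the BrainParameters.
agent.UpdateVectorAction(new float[brainParameters.vectorActionSize[0]]);
}
m_AgentInfos.Clear();
}
}
}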
using System;
using UnityEngine;
using System.Linq;
namespace MLAgents
{
public enum SpaceType
{
Discrete,
Continuous
};
/// <summary>
/// The resolution of a camera used by an agent.
/// The width defines the number of pixels on the horizontal axis.
/// The height defines the number of pixels on the vertical axis.
/// blackAndWhite defines whether or not the image is grayscale.
/// </summary>
[Serializable]
public struct Resolution
{
/// <summary>The width of the observation in pixels </summary>
public int width;
/// <summary>The height of the observation in pixels</summary>
public int height;
/// <summary>
/// If true, the image will be in black and white.
/// If false, it will be in RGB color.
/// </summary>
public bool blackAndWhite;
}
/// <summary>
/// Holds information about the Brain. It defines what are the inputs and outputs of the
/// decision process.
/// </summary>
[Serializable]
public class BrainParameters
{
/// <summary>
/// If continuous : The length of the float vector that represents
/// the state
/// If discrete : The number of possible values the state can take
/// </summary>
public int vectorObservationSize = 1;
[Range(1, 50)] public int numStackedVectorObservations = 1;
/// <summary>
/// If continuous : The length of the float vector that represents
/// the action
/// If discrete : The number of possible values the action can take
/// </summary>
public int[] vectorActionSize = new[] {1};
/// <summary> The list of observation resolutions for the brain</summary>
public Resolution[] cameraResolutions;
/// <summary></summary>The list of strings describing what the actions correpond to */
public string[] vectorActionDescriptions;
/// <summary>Defines if the action is discrete or continuous</summary>
public SpaceType vectorActionSpaceType = SpaceType.Discrete;
/// <summary>
/// Converts a BrainParameters object into a protobuf BrainParametersProto so it can be sent.
/// </summary>
/// <returns>The BrainParametersProto generated.</returns>
/// <param name="name">The name of the brain.</param>
/// <param name="isTraining">Whether or not the Brain is training.</param>
public CommunicatorObjects.BrainParametersProto
ToProto(string name, bool isTraining)
{
var brainParametersProto = new CommunicatorObjects.BrainParametersProto
{
VectorObservationSize = vectorObservationSize,
NumStackedVectorObservations = numStackedVectorObservations,
VectorActionSize = {vectorActionSize},
VectorActionSpaceType =
(CommunicatorObjects.SpaceTypeProto)vectorActionSpaceType,
BrainName = name,
IsTraining = isTraining
};
brainParametersProto.VectorActionDescriptions.AddRange(vectorActionDescriptions);
foreach (var res in cameraResolutions)
{
brainParametersProto.CameraResolutions.Add(
new CommunicatorObjects.ResolutionProto
{
Width = res.width,
Height = res.height,
GrayScale = res.blackAndWhite
});
}
return brainParametersProto;
}
public BrainParameters()
{
}
/// <summary>
/// Converts Resolution protobuf array to C# Resolution array.
/// </summary>
private static Resolution[] ResolutionProtoToNative(
CommunicatorObjects.ResolutionProto[] resolutionProtos)
{
var localCameraResolutions = new Resolution[resolutionProtos.Length];
for (var i = 0; i < resolutionProtos.Length; i++)
{
localCameraResolutions[i] = new Resolution
{
height = resolutionProtos[i].Height,
width = resolutionProtos[i].Width,
blackAndWhite = resolutionProtos[i].GrayScale
};
}
return localCameraResolutions;
}
public BrainParameters(CommunicatorObjects.BrainParametersProto brainParametersProto)
{
vectorObservationSize = brainParametersProto.VectorObservationSize;
cameraResolutions = ResolutionProtoToNative(
brainParametersProto.CameraResolutions.ToArray()
);
numStackedVectorObservations = brainParametersProto.NumStackedVectorObservations;
vectorActionSize = brainParametersProto.VectorActionSize.ToArray();
vectorActionDescriptions = brainParametersProto.VectorActionDescriptions.ToArray();
vectorActionSpaceType = (SpaceType)brainParametersProto.VectorActionSpaceType;
}
/// <summary>
/// Deep clones the BrainParameter object
/// </summary>
/// <returns> A new BrainParameter object with the same values as the original.</returns>
public BrainParameters Clone()
{
return new BrainParameters()
{
vectorObservationSize = vectorObservationSize,
numStackedVectorObservations = numStackedVectorObservations,
vectorActionSize = (int[])vectorActionSize.Clone(),
cameraResolutions = (Resolution[])cameraResolutions.Clone(),
vectorActionDescriptions = (string[])vectorActionDescriptions.Clone(),
vectorActionSpaceType = vectorActionSpaceType
};
}
}
}
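// A minimal sketch of how the fields above fit together for a hypothetical continuous-control
// setup (8 observations stacked twice, 2 continuous actions, no cameras). cameraResolutions is
// left as an empty array so that ToProto can iterate it safely; all values are illustrative.
namespace MLAgents.Examples
{
public static class BrainParametersExample
{
public static BrainParameters CreateContinuousExample()
{
return new BrainParameters
{
vectorObservationSize = 8,
numStackedVectorObservations = 2,
vectorActionSize = new[] { 2 },
cameraResolutions = new Resolution[0],
vectorActionDescriptions = new[] { "forward speed", "turn rate" },
vectorActionSpaceType = SpaceType.Continuous
};
}
}
}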
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;
namespace MLAgents
{
/// <summary>
/// BroadcastHub holds references to Brains and keeps track of whether or not each Brain
/// should be remotely controlled.
/// </summary>
[System.Serializable]
public class BroadcastHub
{
[SerializeField]
public List<Brain> broadcastingBrains = new List<Brain>();
[FormerlySerializedAs("_brainsToControl")]
[SerializeField]
private List<Brain> m_BrainsToControl = new List<Brain>();
/// <summary>
/// The number of Brains inside the BroadcastHub.
/// </summary>
public int Count
{
get { return broadcastingBrains.Count; }
}
/// <summary>
/// Checks that a given Brain is set to be remote controlled.
/// </summary>
/// <param name="brain"> The Brain that is beeing checked</param>
/// <returns>true if the Brain is set to Controlled and false otherwise. Will return
/// false if the Brain is not present in the Hub.</returns>
public bool IsControlled(Brain brain)
{
return m_BrainsToControl.Contains(brain);
}
/// <summary>
/// Sets a brain to controlled.
/// </summary>
/// <param name="brain"> The Brain that is being set to controlled</param>
/// <param name="controlled"> if true, the Brain will be set to remote controlled. Otherwise
/// the brain will be set to broadcast only.</param>
public void SetControlled(Brain brain, bool controlled)
{
if (broadcastingBrains.Contains(brain))
{
if (controlled && !m_BrainsToControl.Contains(brain))
{
m_BrainsToControl.Add(brain);
}
if (!controlled && m_BrainsToControl.Contains(brain))
{
m_BrainsToControl.Remove(brain);
}
}
}
/// <summary>
/// Removes all the Brains of the BroadcastHub
/// </summary>
public void Clear()
{
broadcastingBrains.Clear();
m_BrainsToControl.Clear();
}
}
}
using System.Collections.Generic;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Interface for implementing the behavior of an Agent that uses a Heuristic
/// Brain. The behavior of an Agent in this case is fully decided using the
/// implementation of these methods and no training or inference takes place.
/// Currently, the Heuristic Brain does not support text observations and actions.
/// </summary>
public abstract class Decision : ScriptableObject
{
public BrainParameters brainParameters;
/// <summary>
/// Defines the decision-making logic of the agent. Given the information
/// about the agent, returns a vector of actions.
/// </summary>
/// <returns>Vector action vector.</returns>
/// <param name="vectorObs">The vector observations of the agent.</param>
/// <param name="visualObs">The cameras the agent uses for visual observations.</param>
/// <param name="reward">The reward the agent received at the previous step.</param>
/// <param name="done">Whether or not the agent is done.</param>
/// <param name="memory">
/// The memories stored from the previous step with
/// <see cref="MakeMemory(List{float}, List{Texture2D}, float, bool, List{float})"/>
/// </param>
public abstract float[] Decide(
List<float>
vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory);
/// <summary>
/// Defines the logic for creating the memory vector for the Agent.
/// </summary>
/// <returns>The vector of memories the agent will use at the next step.</returns>
/// <param name="vectorObs">The vector observations of the agent.</param>
/// <param name="visualObs">The cameras the agent uses for visual observations.</param>
/// <param name="reward">The reward the agent received at the previous step.</param>
/// <param name="done">Whether or not the agent is done.</param>
/// <param name="memory">
/// The memories stored from the previous call to this method.
/// </param>
public abstract List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory);
}
}
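// A hypothetical Decision implementation (not part of the SDK) that a Heuristic Brain could
// delegate to. It assumes two continuous actions, always drives "forward", and keeps no
// recurrent memory; a real implementation would inspect vectorObs and visualObs.
using System.Collections.Generic;
using UnityEngine;
namespace MLAgents.Examples
{
public class AlwaysForwardDecision : Decision
{
public override float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
// Full throttle on action 0, no turning on action 1.
return new[] { 1f, 0f };
}
public override List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
// No state is carried between steps in this example.
return new List<float>();
}
}
}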
using System;
using MLAgents.CommunicatorObjects;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Demonstration Object. Contains meta-data regarding demonstration.
/// Used for imitation learning, or other forms of learning from data.
/// </summary>
[Serializable]
public class Demonstration : ScriptableObject
{
public DemonstrationMetaData metaData;
public BrainParameters brainParameters;
public void Initialize(BrainParameters brainParams,
DemonstrationMetaData demonstrationMetaData)
{
brainParameters = brainParams;
metaData = demonstrationMetaData;
}
}
/// <summary>
/// Demonstration meta-data.
/// Kept in a struct for easy serialization and deserialization.
/// </summary>
[Serializable]
public class DemonstrationMetaData
{
public int numberExperiences;
public int numberEpisodes;
public float meanReward;
public string demonstrationName;
public const int ApiVersion = 1;
/// <summary>
/// Constructor for initializing metadata to default values.
/// </summary>
public DemonstrationMetaData()
{
}
/// <summary>
/// Initialize metadata values based on proto object.
/// </summary>
public DemonstrationMetaData(DemonstrationMetaProto demoProto)
{
numberEpisodes = demoProto.NumberEpisodes;
numberExperiences = demoProto.NumberSteps;
meanReward = demoProto.MeanReward;
demonstrationName = demoProto.DemonstrationName;
if (demoProto.ApiVersion != ApiVersion)
{
throw new Exception("API versions of demonstration are incompatible.");
}
}
/// <summary>
/// Convert metadata object to proto object.
/// </summary>
public DemonstrationMetaProto ToProto()
{
var demoProto = new DemonstrationMetaProto
{
ApiVersion = ApiVersion,
MeanReward = meanReward,
NumberSteps = numberExperiences,
NumberEpisodes = numberEpisodes,
DemonstrationName = demonstrationName
};
return demoProto;
}
}
}
using UnityEngine;
using System.Text.RegularExpressions;
namespace MLAgents
{
/// <summary>
/// Demonstration Recorder Component.
/// </summary>
[RequireComponent(typeof(Agent))]
public class DemonstrationRecorder : MonoBehaviour
{
public bool record;
public string demonstrationName;
private Agent m_RecordingAgent;
private string m_FilePath;
private DemonstrationStore m_DemoStore;
public const int MaxNameLength = 16;
private void Start()
{
if (Application.isEditor && record)
{
InitializeDemoStore();
}
}
private void Update()
{
if (Application.isEditor && record && m_DemoStore == null)
{
InitializeDemoStore();
}
}
/// <summary>
/// Creates demonstration store for use in recording.
/// </summary>
private void InitializeDemoStore()
{
m_RecordingAgent = GetComponent<Agent>();
m_DemoStore = new DemonstrationStore();
demonstrationName = SanitizeName(demonstrationName, MaxNameLength);
m_DemoStore.Initialize(
demonstrationName,
m_RecordingAgent.brain.brainParameters,
m_RecordingAgent.brain.name);
Monitor.Log("Recording Demonstration of Agent: ", m_RecordingAgent.name);
}
/// <summary>
/// Removes all characters except alphanumerics from demonstration name.
/// Shorten name if it is longer than the maxNameLength.
/// </summary>
public static string SanitizeName(string demoName, int maxNameLength)
{
var rgx = new Regex("[^a-zA-Z0-9 -]");
demoName = rgx.Replace(demoName, "");
// If the string is too long, it will overflow the metadata.
if (demoName.Length > maxNameLength)
{
demoName = demoName.Substring(0, maxNameLength);
}
return demoName;
}
/// <summary>
/// Forwards AgentInfo to Demonstration Store.
/// </summary>
public void WriteExperience(AgentInfo info)
{
m_DemoStore.Record(info);
}
/// <summary>
/// Closes Demonstration store.
/// </summary>
private void OnApplicationQuit()
{
if (Application.isEditor && record && m_DemoStore != null)
{
m_DemoStore.Close();
}
}
}
}
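// A small usage sketch for the static SanitizeName helper above. The expected outputs in the
// comments follow directly from the regex (letters, digits, spaces and hyphens are kept) and
// the MaxNameLength clamp; the example class name is hypothetical.
namespace MLAgents.Examples
{
public static class DemonstrationNameExample
{
public static void Run()
{
// Illegal characters are stripped: "Agent#1 Demo!" becomes "Agent1 Demo".
var cleaned = DemonstrationRecorder.SanitizeName(
"Agent#1 Demo!", DemonstrationRecorder.MaxNameLength);
// Names longer than MaxNameLength (16) are truncated: this becomes "AVeryLongDemonst".
var truncated = DemonstrationRecorder.SanitizeName(
"AVeryLongDemonstrationName", DemonstrationRecorder.MaxNameLength);
UnityEngine.Debug.Log(cleaned + " / " + truncated);
}
}
}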
using System.IO;
using System.IO.Abstractions;
using Google.Protobuf;
namespace MLAgents
{
/// <summary>
/// Responsible for writing demonstration data to file.
/// </summary>
public class DemonstrationStore
{
public const int MetaDataBytes = 32; // Number of bytes allocated to metadata in demo file.
private readonly IFileSystem m_FileSystem;
private const string k_DemoDirecory = "Assets/Demonstrations/";
private const string k_ExtensionType = ".demo";
private string m_FilePath;
private DemonstrationMetaData m_MetaData;
private Stream m_Writer;
private float m_CumulativeReward;
public DemonstrationStore(IFileSystem fileSystem)
{
m_FileSystem = fileSystem;
}
public DemonstrationStore()
{
m_FileSystem = new FileSystem();
}
/// <summary>
/// Initializes the Demonstration Store, and writes initial data.
/// </summary>
public void Initialize(
string demonstrationName, BrainParameters brainParameters, string brainName)
{
CreateDirectory();
CreateDemonstrationFile(demonstrationName);
WriteBrainParameters(brainName, brainParameters);
}
/// <summary>
/// Checks for the existence of the Demonstrations directory
/// and creates it if it does not exist.
/// </summary>
private void CreateDirectory()
{
if (!m_FileSystem.Directory.Exists(k_DemoDirecory))
{
m_FileSystem.Directory.CreateDirectory(k_DemoDirecory);
}
}
/// <summary>
/// Creates demonstration file.
/// </summary>
private void CreateDemonstrationFile(string demonstrationName)
{
// Creates demonstration file.
var literalName = demonstrationName;
m_FilePath = k_DemoDirecory + literalName + k_ExtensionType;
var uniqueNameCounter = 0;
while (m_FileSystem.File.Exists(m_FilePath))
{
literalName = demonstrationName + "_" + uniqueNameCounter;
m_FilePath = k_DemoDirecory + literalName + k_ExtensionType;
uniqueNameCounter++;
}
m_Writer = m_FileSystem.File.Create(m_FilePath);
m_MetaData = new DemonstrationMetaData {demonstrationName = demonstrationName};
var metaProto = m_MetaData.ToProto();
metaProto.WriteDelimitedTo(m_Writer);
}
/// <summary>
/// Writes brain parameters to file.
/// </summary>
private void WriteBrainParameters(string brainName, BrainParameters brainParameters)
{
// Writes BrainParameters to file.
m_Writer.Seek(MetaDataBytes + 1, 0);
var brainProto = brainParameters.ToProto(brainName, false);
brainProto.WriteDelimitedTo(m_Writer);
}
/// <summary>
/// Write AgentInfo experience to file.
/// </summary>
public void Record(AgentInfo info)
{
// Increment meta-data counters.
m_MetaData.numberExperiences++;
m_CumulativeReward += info.reward;
if (info.done)
{
EndEpisode();
}
// Write AgentInfo to file.
var agentProto = info.ToProto();
agentProto.WriteDelimitedTo(m_Writer);
}
/// <summary>
/// Performs all clean-up necessary
/// </summary>
public void Close()
{
EndEpisode();
m_MetaData.meanReward = m_CumulativeReward / m_MetaData.numberEpisodes;
WriteMetadata();
m_Writer.Close();
}
/// <summary>
/// Performs necessary episode-completion steps.
/// </summary>
private void EndEpisode()
{
m_MetaData.numberEpisodes += 1;
}
/// <summary>
/// Writes meta-data.
/// </summary>
private void WriteMetadata()
{
var metaProto = m_MetaData.ToProto();
var metaProtoBytes = metaProto.ToByteArray();
m_Writer.Write(metaProtoBytes, 0, metaProtoBytes.Length);
m_Writer.Seek(0, 0);
metaProto.WriteDelimitedTo(m_Writer);
}
}
}
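// A minimal sketch showing how the IFileSystem constructor above lets the store be exercised
// without writing into the real Assets/ folder. It assumes the
// System.IO.Abstractions.TestingHelpers package (MockFileSystem) is referenced; the
// BrainParameters values and names are illustrative only.
using System.IO.Abstractions.TestingHelpers;
namespace MLAgents.Examples
{
public static class DemonstrationStoreExample
{
public static void Run()
{
var fileSystem = new MockFileSystem();
var store = new DemonstrationStore(fileSystem);
var brainParameters = new BrainParameters
{
vectorObservationSize = 4,
vectorActionSize = new[] { 2 },
cameraResolutions = new Resolution[0],
vectorActionDescriptions = new string[0],
vectorActionSpaceType = SpaceType.Continuous
};
// Writes the metadata stub and the brain parameters into the in-memory file system.
store.Initialize("ExampleDemo", brainParameters, "ExampleBrain");
store.Close();
}
}
}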
using UnityEngine;
using UnityEngine.Serialization;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace MLAgents
{
/// <summary>
/// The Heuristic Brain type allows you to hand code an Agent's decision making process.
/// A Heuristic Brain requires an implementation of the Decision interface to which it
/// delegates the decision making process.
/// When using a Heuristic Brain, you must give it a MonoScript of a Decision implementation.
/// </summary>
[CreateAssetMenu(fileName = "NewHeuristicBrain", menuName = "ML-Agents/Heuristic Brain")]
public class HeuristicBrain : Brain
{
[SerializeField]
[HideInInspector]
public Decision decision;
#if UNITY_EDITOR
[HideInInspector]
public MonoScript decisionScript;
#endif
[FormerlySerializedAs("c_decision")]
[SerializeField]
[HideInInspector]
public string cDecision;
public void OnValidate()
{
#if UNITY_EDITOR
if (decisionScript != null)
{
cDecision = decisionScript.GetClass().Name;
}
else
{
cDecision = "";
}
#endif
}
/// <inheritdoc/>
protected override void Initialize()
{
if ((cDecision != null) && decision == null)
{
decision = CreateInstance(cDecision) as Decision;
decision.brainParameters = brainParameters;
}
}
/// Uses the Decision component to decide what action to take.
protected override void DecideAction()
{
if (decision == null)
{
throw new UnityAgentsException(
"The Brain is set to Heuristic, but no decision script attached to it");
}
foreach (var agent in m_AgentInfos.Keys)
{
agent.UpdateVectorAction(decision.Decide(
m_AgentInfos[agent].stackedVectorObservation,
m_AgentInfos[agent].visualObservations,
m_AgentInfos[agent].reward,
m_AgentInfos[agent].done,
m_AgentInfos[agent].memories));
}
foreach (var agent in m_AgentInfos.Keys)
{
agent.UpdateMemoriesAction(decision.MakeMemory(
m_AgentInfos[agent].stackedVectorObservation,
m_AgentInfos[agent].visualObservations,
m_AgentInfos[agent].reward,
m_AgentInfos[agent].done,
m_AgentInfos[agent].memories));
}
m_AgentInfos.Clear();
}
}
}
using UnityEngine;
using MLAgents.CommunicatorObjects;
namespace MLAgents
{
public struct CommunicatorParameters
{
public int port;
}
/**
This is the interface of the Communicators.
This does not need to be modified nor implemented to create a Unity environment.
When the Unity Communicator is initialized, it will wait for the External Communicator
to be initialized as well. The two communicators will then exchange their first messages
that will usually contain information for initialization (information that does not need
to be resent at each new exchange).
By convention a Unity input is from External to Unity and a Unity output is from Unity to
External. Inputs and outputs are relative to Unity.
By convention, when the Unity Communicator and External Communicator call exchange, the
exchange is NOT simultaneous but sequential. This means that when a side of the
communication calls exchange, the other will receive the result of its previous
exchange call.
This is what happens when A calls exchange a single time:
A sends data_1 to B -> B receives data_1 -> B generates and sends data_2 -> A receives data_2
When A calls exchange, it sends data_1 and receives data_2
Since the messages are sent back and forth with exchange and simultaneously when calling
initialize, External sends two messages at initialization.
The structure of the messages is as follows:
UnityMessage
...Header
...UnityOutput
......UnityRLOutput
......UnityRLInitializationOutput
...UnityInput
......UnityRLInput
......UnityRLInitializationInput
UnityOutput and UnityInput can be extended to provide functionalities beyond RL
UnityRLOutput and UnityRLInput can be extended to provide new RL functionalities
*/
public interface ICommunicator
{
/// <summary>
/// Initialize the communicator by sending the first UnityOutput and receiving the
/// first UnityInput. The second UnityInput is stored in the unityInput argument.
/// </summary>
/// <returns>The first Unity Input.</returns>
/// <param name="unityOutput">The first Unity Output.</param>
/// <param name="unityInput">The second Unity input.</param>
UnityInput Initialize(UnityOutput unityOutput,
out UnityInput unityInput);
/// <summary>
/// Send a UnityOutput and receives a UnityInput.
/// </summary>
/// <returns>The next UnityInput.</returns>
/// <param name="unityOutput">The UnityOutput to be sent.</param>
UnityInput Exchange(UnityOutput unityOutput);
/// <summary>
/// Close the communicator gracefully on both sides of the communication.
/// </summary>
void Close();
}
}
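// A hypothetical ICommunicator stub that can stand in for the gRPC or socket implementations
// in play-mode tests. It simply hands back a pre-built UnityInput, which is enough to
// exercise the Initialize/Exchange call order described above.
using MLAgents.CommunicatorObjects;
namespace MLAgents.Examples
{
public class StubCommunicator : ICommunicator
{
private readonly UnityInput m_FixedInput;
public StubCommunicator(UnityInput fixedInput)
{
m_FixedInput = fixedInput;
}
public UnityInput Initialize(UnityOutput unityOutput, out UnityInput unityInput)
{
// The real communicators receive two distinct messages here; the stub reuses one.
unityInput = m_FixedInput;
return m_FixedInput;
}
public UnityInput Exchange(UnityOutput unityOutput)
{
return m_FixedInput;
}
public void Close()
{
}
}
}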
using System;
using System.Collections.Generic;
using UnityEngine;
using System.Linq;
using Barracuda;
using MLAgents.InferenceBrain;
using UnityEngine.Profiling;
namespace MLAgents
{
public enum InferenceDevice
{
CPU = 0,
GPU = 1
}
/// <summary>
/// The Learning Brain works differently depending on whether you are training it or not.
/// When training your Agents, drag the Learning Brain to the Academy's BroadcastHub and check
/// the checkbox Control. When using a pretrained model, just drag the Model file into the
/// Model property of the Learning Brain.
/// The property model corresponds to the Model currently attached to the Brain. Before
/// being used, a call to ReloadModel is required.
/// When the Learning Brain is not training, it uses a TensorFlow model to make decisions.
/// The Proximal Policy Optimization (PPO) and Behavioral Cloning algorithms included with
/// the ML-Agents SDK produce trained TensorFlow models that you can use with the
/// Learning Brain.
/// </summary>
[CreateAssetMenu(fileName = "NewLearningBrain", menuName = "ML-Agents/Learning Brain")]
public class LearningBrain : Brain
{
private ITensorAllocator m_TensorAllocator;
private TensorGenerator m_TensorGenerator;
private TensorApplier m_TensorApplier;
public NNModel model;
private Model m_BarracudaModel;
private IWorker m_Engine;
private bool m_Verbose = false;
private BarracudaModelParamLoader m_ModelParamLoader;
private string[] m_OutputNames;
[Tooltip("Inference execution device. CPU is the fastest option for most of ML Agents models. " +
"(This field is not applicable for training).")]
public InferenceDevice inferenceDevice = InferenceDevice.CPU;
private IReadOnlyList<TensorProxy> m_InferenceInputs;
private IReadOnlyList<TensorProxy> m_InferenceOutputs;
[NonSerialized]
private bool m_IsControlled;
/// <summary>
/// When Called, the brain will be controlled externally. It will not use the
/// model to decide on actions.
/// </summary>
public void SetToControlledExternally()
{
m_IsControlled = true;
}
/// <inheritdoc />
protected override void Initialize()
{
ReloadModel();
}
/// <summary>
/// Initializes the Brain with the Model that it will use when selecting actions for
/// the agents
/// </summary>
/// <param name="seed"> The seed that will be used to initialize the RandomNormal
/// and Multinomial objects used when running inference.</param>
/// <exception cref="UnityAgentsException">Throws an error when the model is null
/// </exception>
public void ReloadModel(int seed = 0)
{
if (m_TensorAllocator == null)
m_TensorAllocator = new TensorCachingAllocator();
if (model != null)
{
#if BARRACUDA_VERBOSE
_verbose = true;
#endif
D.logEnabled = m_Verbose;
// Cleanup previous instance
if (m_Engine != null)
m_Engine.Dispose();
m_BarracudaModel = ModelLoader.Load(model.Value);
var executionDevice = inferenceDevice == InferenceDevice.GPU
? BarracudaWorkerFactory.Type.ComputePrecompiled
: BarracudaWorkerFactory.Type.CSharp;
m_Engine = BarracudaWorkerFactory.CreateWorker(executionDevice, m_BarracudaModel, m_Verbose);
}
else
{
m_BarracudaModel = null;
m_Engine = null;
}
m_ModelParamLoader = BarracudaModelParamLoader.GetLoaderAndCheck(m_Engine, m_BarracudaModel, brainParameters);
m_InferenceInputs = m_ModelParamLoader.GetInputTensors();
m_OutputNames = m_ModelParamLoader.GetOutputNames();
m_TensorGenerator = new TensorGenerator(brainParameters, seed, m_TensorAllocator, m_BarracudaModel);
m_TensorApplier = new TensorApplier(brainParameters, seed, m_TensorAllocator, m_BarracudaModel);
}
/// <summary>
/// Return a list of failed checks corresponding to the failed compatibility checks
/// between the Model and the BrainParameters. Note : This does not reload the model.
/// If changes have been made to the BrainParameters or the Model, the model must be
/// reloaded using ReloadModel before trying to get the compatibility checks.
/// </summary>
/// <returns> The list of the failed compatibility checks between the Model and the
/// Brain Parameters</returns>
public IEnumerable<string> GetModelFailedChecks()
{
return (m_ModelParamLoader != null) ? m_ModelParamLoader.GetChecks() : new List<string>();
}
/// <inheritdoc />
protected override void DecideAction()
{
if (m_IsControlled)
{
m_AgentInfos.Clear();
return;
}
var currentBatchSize = m_AgentInfos.Count();
if (currentBatchSize == 0)
{
return;
}
Profiler.BeginSample("LearningBrain.DecideAction");
if (m_Engine == null)
{
Debug.LogError($"No model was present for the Brain {name}.");
return;
}
Profiler.BeginSample($"MLAgents.{name}.GenerateTensors");
// Prepare the input tensors to be fed into the engine
m_TensorGenerator.GenerateTensors(m_InferenceInputs, currentBatchSize, m_AgentInfos);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{name}.PrepareBarracudaInputs");
var inputs = PrepareBarracudaInputs(m_InferenceInputs);
Profiler.EndSample();
// Execute the Model
Profiler.BeginSample($"MLAgents.{name}.ExecuteGraph");
m_Engine.Execute(inputs);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{name}.FetchBarracudaOutputs");
m_InferenceOutputs = FetchBarracudaOutputs(m_OutputNames);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{name}.ApplyTensors");
// Update the outputs
m_TensorApplier.ApplyTensors(m_InferenceOutputs, m_AgentInfos);
Profiler.EndSample();
m_AgentInfos.Clear();
Profiler.EndSample();
}
protected Dictionary<string, Tensor> PrepareBarracudaInputs(IEnumerable<TensorProxy> infInputs)
{
var inputs = new Dictionary<string, Tensor>();
foreach (var inp in infInputs)
{
inputs[inp.name] = inp.data;
}
return inputs;
}
protected List<TensorProxy> FetchBarracudaOutputs(string[] names)
{
var outputs = new List<TensorProxy>();
foreach (var n in names)
{
var output = m_Engine.Peek(n);
outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n));
}
return outputs;
}
public void OnDisable()
{
m_Engine?.Dispose();
m_TensorAllocator?.Reset(false);
}
}
}
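// A minimal sketch of swapping a model at runtime: assign the NNModel, call ReloadModel, then
// surface any incompatibilities reported by GetModelFailedChecks. The component and field
// names here are hypothetical.
using Barracuda;
using UnityEngine;
namespace MLAgents.Examples
{
public class ModelSwapExample : MonoBehaviour
{
public LearningBrain learningBrain;
public NNModel newModel;
public void Swap()
{
learningBrain.model = newModel;
learningBrain.ReloadModel();
foreach (var check in learningBrain.GetModelFailedChecks())
{
// Each entry describes one mismatch between the model and the BrainParameters.
Debug.LogWarning(check);
}
}
}
}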
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
namespace MLAgents
{
/// <summary>
/// Monitor is used to display information about the Agent within the Unity
/// scene. Use the log function to add information to your monitor.
/// </summary>
public class Monitor : MonoBehaviour
{
/// <summary>
/// The type of monitor the information must be displayed in.
/// <slider> corresponds to a single rectangle whose width is given
/// by a float between -1 and 1. (green is positive, red is negative)
/// </slider>
/// <hist> corresponds to n vertical sliders. </hist>
/// <text> is a text field. </text>
/// <bar> is a rectangle of fixed length to represent the proportions </bar>
/// of a list of floats.
/// </summary>
public enum DisplayType
{
Independent,
Proportion
}
/// <summary>
/// Represents how high above the target the monitors will be.
/// </summary>
public static float verticalOffset = 3f;
static bool s_IsInstantiated;
static GameObject s_Canvas;
static Dictionary<Transform, Dictionary<string, DisplayValue>> s_DisplayTransformValues;
/// <summary>
/// Camera used to calculate GUI screen position relative to the target
/// transform.
/// </summary>
static Dictionary<Transform, Camera> s_TransformCamera;
static Color[] s_BarColors;
struct DisplayValue
{
public float time;
public string stringValue;
public float floatValue;
public float[] floatArrayValues;
public enum ValueType
{
Float,
FloatarrayIndependent,
FloatarrayProportion,
String
}
public ValueType valueType;
}
static GUIStyle s_KeyStyle;
static GUIStyle s_ValueStyle;
static GUIStyle s_GreenStyle;
static GUIStyle s_RedStyle;
static GUIStyle[] s_ColorStyle;
static bool s_Initialized;
/// <summary>
/// Use the Monitor.Log static function to attach information to a transform.
/// </summary>
/// <returns>The log.</returns>
/// <param name="key">The name of the information you wish to Log.</param>
/// <param name="value">The string value you want to display.</param>
/// <param name="target">The transform you want to attach the information to.
/// </param>
/// <param name="camera">Camera used to calculate GUI position relative to
/// the target. If null, `Camera.main` will be used.</param>
public static void Log(
string key,
string value,
Transform target = null,
Camera camera = null)
{
if (!s_IsInstantiated)
{
InstantiateCanvas();
s_IsInstantiated = true;
}
if (target == null)
{
target = s_Canvas.transform;
}
s_TransformCamera[target] = camera;
if (!s_DisplayTransformValues.Keys.Contains(target))
{
s_DisplayTransformValues[target] =
new Dictionary<string, DisplayValue>();
}
var displayValues =
s_DisplayTransformValues[target];
if (value == null)
{
RemoveValue(target, key);
return;
}
if (!displayValues.ContainsKey(key))
{
var dv = new DisplayValue();
dv.time = Time.timeSinceLevelLoad;
dv.stringValue = value;
dv.valueType = DisplayValue.ValueType.String;
displayValues[key] = dv;
while (displayValues.Count > 20)
{
var max = (
displayValues
.Aggregate((l, r) => l.Value.time < r.Value.time ? l : r)
.Key
);
RemoveValue(target, max);
}
}
else
{
var dv = displayValues[key];
dv.stringValue = value;
dv.valueType = DisplayValue.ValueType.String;
displayValues[key] = dv;
}
}
/// <summary>
/// Use the Monitor.Log static function to attach information to a transform.
/// </summary>
/// <returns>The log.</returns>
/// <param name="key">The name of the information you wish to Log.</param>
/// <param name="value">The float value you want to display.</param>
/// <param name="target">The transform you want to attach the information to.
/// </param>
/// <param name="camera">Camera used to calculate GUI position relative to
/// the target. If null, `Camera.main` will be used.</param>
public static void Log(
string key,
float value,
Transform target = null,
Camera camera = null)
{
if (!s_IsInstantiated)
{
InstantiateCanvas();
s_IsInstantiated = true;
}
if (target == null)
{
target = s_Canvas.transform;
}
s_TransformCamera[target] = camera;
if (!s_DisplayTransformValues.Keys.Contains(target))
{
s_DisplayTransformValues[target] = new Dictionary<string, DisplayValue>();
}
var displayValues = s_DisplayTransformValues[target];
if (!displayValues.ContainsKey(key))
{
var dv = new DisplayValue();
dv.time = Time.timeSinceLevelLoad;
dv.floatValue = value;
dv.valueType = DisplayValue.ValueType.Float;
displayValues[key] = dv;
while (displayValues.Count > 20)
{
var max = (
displayValues.Aggregate((l, r) => l.Value.time < r.Value.time ? l : r).Key);
RemoveValue(target, max);
}
}
else
{
var dv = displayValues[key];
dv.floatValue = value;
dv.valueType = DisplayValue.ValueType.Float;
displayValues[key] = dv;
}
}
/// <summary>
/// Use the Monitor.Log static function to attach information to a transform.
/// </summary>
/// <returns>The log.</returns>
/// <param name="key">The name of the information you wish to Log.</param>
/// <param name="value">The array of float you want to display.</param>
/// <param name="displayType">The type of display.</param>
/// <param name="target">The transform you want to attach the information to.
/// </param>
/// <param name="camera">Camera used to calculate GUI position relative to
/// the target. If null, `Camera.main` will be used.</param>
public static void Log(
string key,
float[] value,
Transform target = null,
DisplayType displayType = DisplayType.Independent,
Camera camera = null
)
{
if (!s_IsInstantiated)
{
InstantiateCanvas();
s_IsInstantiated = true;
}
if (target == null)
{
target = s_Canvas.transform;
}
s_TransformCamera[target] = camera;
if (!s_DisplayTransformValues.Keys.Contains(target))
{
s_DisplayTransformValues[target] = new Dictionary<string, DisplayValue>();
}
var displayValues = s_DisplayTransformValues[target];
if (!displayValues.ContainsKey(key))
{
var dv = new DisplayValue();
dv.time = Time.timeSinceLevelLoad;
dv.floatArrayValues = value;
if (displayType == DisplayType.Independent)
{
dv.valueType = DisplayValue.ValueType.FloatarrayIndependent;
}
else
{
dv.valueType = DisplayValue.ValueType.FloatarrayProportion;
}
displayValues[key] = dv;
while (displayValues.Count > 20)
{
var max = (
displayValues.Aggregate((l, r) => l.Value.time < r.Value.time ? l : r).Key);
RemoveValue(target, max);
}
}
else
{
var dv = displayValues[key];
dv.floatArrayValues = value;
if (displayType == DisplayType.Independent)
{
dv.valueType = DisplayValue.ValueType.FloatarrayIndependent;
}
else
{
dv.valueType = DisplayValue.ValueType.FloatarrayProportion;
}
displayValues[key] = dv;
}
}
/// <summary>
/// Remove a value from a monitor.
/// </summary>
/// <param name="target">
/// The transform to which the information is attached.
/// </param>
/// <param name="key">The key of the information you want to remove.</param>
public static void RemoveValue(Transform target, string key)
{
if (target == null)
{
target = s_Canvas.transform;
}
if (s_DisplayTransformValues.Keys.Contains(target))
{
if (s_DisplayTransformValues[target].ContainsKey(key))
{
s_DisplayTransformValues[target].Remove(key);
if (s_DisplayTransformValues[target].Keys.Count == 0)
{
s_DisplayTransformValues.Remove(target);
}
}
}
}
/// <summary>
/// Remove all information from a monitor.
/// </summary>
/// <param name="target">
/// The transform to which the information is attached.
/// </param>
public static void RemoveAllValues(Transform target)
{
if (target == null)
{
target = s_Canvas.transform;
}
if (s_DisplayTransformValues.Keys.Contains(target))
{
s_DisplayTransformValues.Remove(target);
}
}
/// <summary>
/// Use SetActive to enable or disable the Monitor via script
/// </summary>
/// <param name="active">Value to set the Monitor's status to.</param>
public static void SetActive(bool active)
{
if (!s_IsInstantiated)
{
InstantiateCanvas();
s_IsInstantiated = true;
}
if (s_Canvas != null)
{
s_Canvas.SetActive(active);
}
}
/// Initializes the canvas.
static void InstantiateCanvas()
{
s_Canvas = GameObject.Find("AgentMonitorCanvas");
if (s_Canvas == null)
{
s_Canvas = new GameObject();
s_Canvas.name = "AgentMonitorCanvas";
s_Canvas.AddComponent<Monitor>();
}
s_DisplayTransformValues = new Dictionary<Transform,
Dictionary<string, DisplayValue>>();
s_TransformCamera = new Dictionary<Transform, Camera>();
}
/// <inheritdoc/>
void OnGUI()
{
if (!s_Initialized)
{
Initialize();
s_Initialized = true;
}
var toIterate = s_DisplayTransformValues.Keys.ToList();
foreach (var target in toIterate)
{
if (target == null)
{
s_DisplayTransformValues.Remove(target);
continue;
}
// get camera
var cam = s_TransformCamera[target];
if (cam == null)
{
cam = Camera.main;
}
var widthScaler = (Screen.width / 1000f);
var keyPixelWidth = 100 * widthScaler;
var keyPixelHeight = 20 * widthScaler;
var paddingWidth = 10 * widthScaler;
var scale = 1f;
var origin = new Vector3(
Screen.width / 2.0f - keyPixelWidth, Screen.height);
if (!(target == s_Canvas.transform))
{
var camTransform = cam.transform;
var position = target.position;
var cam2Obj = position - camTransform.position;
scale = Mathf.Min(
1,
20f / (Vector3.Dot(cam2Obj, camTransform.forward)));
var worldPosition = cam.WorldToScreenPoint(
position + new Vector3(0, verticalOffset, 0));
origin = new Vector3(
worldPosition.x - keyPixelWidth * scale, Screen.height - worldPosition.y);
}
keyPixelWidth *= scale;
keyPixelHeight *= scale;
paddingWidth *= scale;
s_KeyStyle.fontSize = (int)(keyPixelHeight * 0.8f);
if (s_KeyStyle.fontSize < 2)
{
continue;
}
var displayValues = s_DisplayTransformValues[target];
var index = 0;
var orderedKeys = displayValues.Keys.OrderBy(x => - displayValues[x].time);
foreach (var key in orderedKeys)
{
s_KeyStyle.alignment = TextAnchor.MiddleRight;
GUI.Label(
new Rect(
origin.x, origin.y - (index + 1) * keyPixelHeight,
keyPixelWidth, keyPixelHeight),
key,
s_KeyStyle);
float[] vals;
GUIStyle s;
switch (displayValues[key].valueType)
{
case DisplayValue.ValueType.String:
s_ValueStyle.alignment = TextAnchor.MiddleLeft;
GUI.Label(
new Rect(
origin.x + paddingWidth + keyPixelWidth,
origin.y - (index + 1) * keyPixelHeight,
keyPixelWidth, keyPixelHeight),
displayValues[key].stringValue,
s_ValueStyle);
break;
case DisplayValue.ValueType.Float:
var sliderValue = displayValues[key].floatValue;
sliderValue = Mathf.Min(1f, sliderValue);
s = s_GreenStyle;
if (sliderValue < 0)
{
sliderValue = Mathf.Min(1f, -sliderValue);
s = s_RedStyle;
}
GUI.Box(
new Rect(
origin.x + paddingWidth + keyPixelWidth,
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * sliderValue, keyPixelHeight * 0.8f),
GUIContent.none,
s);
break;
case DisplayValue.ValueType.FloatarrayIndependent:
const float histWidth = 0.15f;
vals = displayValues[key].floatArrayValues;
for (var i = 0; i < vals.Length; i++)
{
var value = Mathf.Min(vals[i], 1);
s = s_GreenStyle;
if (value < 0)
{
value = Mathf.Min(1f, -value);
s = s_RedStyle;
}
GUI.Box(
new Rect(
origin.x + paddingWidth + keyPixelWidth +
(keyPixelWidth * histWidth + paddingWidth / 2) * i,
origin.y - (index + 0.1f) * keyPixelHeight,
keyPixelWidth * histWidth, -keyPixelHeight * value),
GUIContent.none,
s);
}
break;
case DisplayValue.ValueType.FloatarrayProportion:
var valsSum = 0f;
var valsCum = 0f;
vals = displayValues[key].floatArrayValues;
foreach (var f in vals)
{
valsSum += Mathf.Max(f, 0);
}
if (valsSum < float.Epsilon)
{
Debug.LogError(
$"The Monitor value for key {key} " +
"must be a list or array of " +
"positive values and cannot " +
"be empty.");
}
else
{
for (var i = 0; i < vals.Length; i++)
{
var value = Mathf.Max(vals[i], 0) / valsSum;
GUI.Box(
new Rect(
origin.x + paddingWidth +
keyPixelWidth + keyPixelWidth * valsCum,
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * value, keyPixelHeight * 0.8f),
GUIContent.none,
s_ColorStyle[i % s_ColorStyle.Length]);
valsCum += value;
}
}
break;
}
index++;
}
}
}
/// Helper method used to initialize the GUI. Called once.
void Initialize()
{
s_KeyStyle = GUI.skin.label;
s_ValueStyle = GUI.skin.label;
s_ValueStyle.clipping = TextClipping.Overflow;
s_ValueStyle.wordWrap = false;
s_BarColors = new[]
{
Color.magenta,
Color.blue,
Color.cyan,
Color.green,
Color.yellow,
Color.red
};
s_ColorStyle = new GUIStyle[s_BarColors.Length];
for (var i = 0; i < s_BarColors.Length; i++)
{
var texture = new Texture2D(1, 1, TextureFormat.ARGB32, false);
texture.SetPixel(0, 0, s_BarColors[i]);
texture.Apply();
var staticRectStyle = new GUIStyle();
staticRectStyle.normal.background = texture;
s_ColorStyle[i] = staticRectStyle;
}
s_GreenStyle = s_ColorStyle[3];
s_RedStyle = s_ColorStyle[5];
}
}
}
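// A short usage sketch for the static Log overloads above, attached to a hypothetical agent
// object. Strings render as text, floats as a green/red slider clamped to [-1, 1], and float
// arrays as either independent bars or a single proportion bar depending on DisplayType.
using UnityEngine;
namespace MLAgents.Examples
{
public class MonitorExample : MonoBehaviour
{
void Update()
{
Monitor.Log("Status", "exploring", transform);
Monitor.Log("Reward", 0.25f, transform);
Monitor.Log(
"Action probabilities",
new[] { 0.1f, 0.7f, 0.2f },
transform,
Monitor.DisplayType.Proportion);
}
}
}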
using UnityEngine;
using UnityEngine.Serialization;
namespace MLAgents
{
/// <summary>
/// Implementation of the Player Brain. Inherits from the base class Brain. Allows the user to
/// manually select decisions for linked agents by creating a mapping from key presses to
/// actions.
/// You can use Player Brains to control a "teacher" Agent that trains other Agents during
/// imitation learning. You can also use Player Brains to test your Agents and environment
/// before training agents with reinforcement learning.
/// </summary>
[CreateAssetMenu(fileName = "NewPlayerBrain", menuName = "ML-Agents/Player Brain")]
public class PlayerBrain : Brain
{
[System.Serializable]
public struct DiscretePlayerAction
{
public KeyCode key;
public int branchIndex;
public int value;
}
[System.Serializable]
public struct KeyContinuousPlayerAction
{
public KeyCode key;
public int index;
public float value;
}
[System.Serializable]
public struct AxisContinuousPlayerAction
{
public string axis;
public int index;
public float scale;
}
/// Contains the mapping from input to continuous actions
[SerializeField]
[FormerlySerializedAs("continuousPlayerActions")]
[Tooltip("The list of keys and the value they correspond to for continuous control.")]
public KeyContinuousPlayerAction[] keyContinuousPlayerActions;
/// Contains the mapping from input to continuous actions
[SerializeField]
[Tooltip("The list of axis actions.")]
public AxisContinuousPlayerAction[] axisContinuousPlayerActions;
/// Contains the mapping from input to discrete actions
[SerializeField]
[Tooltip("The list of keys and the value they correspond to for discrete control.")]
public DiscretePlayerAction[] discretePlayerActions;
protected override void Initialize() {}
/// Uses the continuous or discrete inputs of the player to
/// decide action
protected override void DecideAction()
{
if (brainParameters.vectorActionSpaceType == SpaceType.Continuous)
{
foreach (var agent in m_AgentInfos.Keys)
{
var action = new float[brainParameters.vectorActionSize[0]];
foreach (var cha in keyContinuousPlayerActions)
{
if (Input.GetKey(cha.key))
{
action[cha.index] = cha.value;
}
}
foreach (var axisAction in axisContinuousPlayerActions)
{
var axisValue = Input.GetAxis(axisAction.axis);
axisValue *= axisAction.scale;
if (Mathf.Abs(axisValue) > 0.0001)
{
action[axisAction.index] = axisValue;
}
}
agent.UpdateVectorAction(action);
}
}
else
{
foreach (var agent in m_AgentInfos.Keys)
{
var action = new float[brainParameters.vectorActionSize.Length];
foreach (var dha in discretePlayerActions)
{
if (Input.GetKey(dha.key))
{
action[dha.branchIndex] = dha.value;
}
}
agent.UpdateVectorAction(action);
}
}
m_AgentInfos.Clear();
}
}
}
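// A minimal sketch showing how the mapping structs above could be filled in from code; in
// practice this is normally done in the Inspector. The key binding, axis name and action
// indices are illustrative only.
using UnityEngine;
namespace MLAgents.Examples
{
public static class PlayerBrainExample
{
public static PlayerBrain CreateExample()
{
var playerBrain = ScriptableObject.CreateInstance<PlayerBrain>();
playerBrain.keyContinuousPlayerActions = new[]
{
// Holding W writes 1.0 into continuous action index 0.
new PlayerBrain.KeyContinuousPlayerAction { key = KeyCode.W, index = 0, value = 1f }
};
playerBrain.axisContinuousPlayerActions = new[]
{
// The Horizontal axis value, scaled by 1, is written into continuous action index 1.
new PlayerBrain.AxisContinuousPlayerAction { axis = "Horizontal", index = 1, scale = 1f }
};
playerBrain.discretePlayerActions = new PlayerBrain.DiscretePlayerAction[0];
return playerBrain;
}
}
}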
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;
namespace MLAgents
{
[Serializable]
public class ResetParameters : Dictionary<string, float>, ISerializationCallbackReceiver
{
[Serializable]
public struct ResetParameter
{
public string key;
public float value;
}
[FormerlySerializedAs("resetParameters")]
[SerializeField] private List<ResetParameter> m_ResetParameters = new List<ResetParameter>();
public void OnBeforeSerialize()
{
m_ResetParameters.Clear();
foreach (var pair in this)
{
var rp = new ResetParameter();
rp.key = pair.Key;
rp.value = pair.Value;
m_ResetParameters.Add(rp);
}
}
public void OnAfterDeserialize()
{
Clear();
for (var i = 0; i < m_ResetParameters.Count; i++)
{
if (ContainsKey(m_ResetParameters[i].key))
{
Debug.LogError("The ResetParameters contains the same key twice");
}
else
{
Add(m_ResetParameters[i].key, m_ResetParameters[i].value);
}
}
}
}
}
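// A small sketch of how reset parameters are typically consumed inside a reset callback,
// falling back to a default when a key has not been defined. The "gravity_scale" key is
// illustrative only.
namespace MLAgents.Examples
{
public static class ResetParametersExample
{
public static float GetOrDefault(ResetParameters parameters, string key, float defaultValue)
{
float value;
return parameters.TryGetValue(key, out value) ? value : defaultValue;
}
public static void Apply(ResetParameters parameters)
{
var gravityScale = GetOrDefault(parameters, "gravity_scale", 1f);
UnityEngine.Physics.gravity = new UnityEngine.Vector3(0f, -9.81f * gravityScale, 0f);
}
}
}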
# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
using Grpc.Core;
#endif
#if UNITY_EDITOR
using UnityEditor;
#endif
using UnityEngine;
using MLAgents.CommunicatorObjects;
namespace MLAgents
{
/// Responsible for communication with External using gRPC.
public class RpcCommunicator : ICommunicator
{
/// If true, the communication is active.
bool m_IsOpen;
# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
/// The Unity to External client.
UnityToExternal.UnityToExternalClient m_Client;
#endif
/// The communicator parameters sent at construction
CommunicatorParameters m_CommunicatorParameters;
/// <summary>
/// Initializes a new instance of the RPCCommunicator class.
/// </summary>
/// <param name="communicatorParameters">Communicator parameters.</param>
public RpcCommunicator(CommunicatorParameters communicatorParameters)
{
m_CommunicatorParameters = communicatorParameters;
}
/// <summary>
/// Initialize the communicator by sending the first UnityOutput and receiving the
/// first UnityInput. The second UnityInput is stored in the unityInput argument.
/// </summary>
/// <returns>The first Unity Input.</returns>
/// <param name="unityOutput">The first Unity Output.</param>
/// <param name="unityInput">The second Unity input.</param>
public UnityInput Initialize(UnityOutput unityOutput,
out UnityInput unityInput)
{
# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
m_IsOpen = true;
var channel = new Channel(
"localhost:" + m_CommunicatorParameters.port,
ChannelCredentials.Insecure);
m_Client = new UnityToExternal.UnityToExternalClient(channel);
var result = m_Client.Exchange(WrapMessage(unityOutput, 200));
unityInput = m_Client.Exchange(WrapMessage(null, 200)).UnityInput;
#if UNITY_EDITOR
#if UNITY_2017_2_OR_NEWER
EditorApplication.playModeStateChanged += HandleOnPlayModeChanged;
#else
EditorApplication.playmodeStateChanged += HandleOnPlayModeChanged;
#endif
#endif
return result.UnityInput;
#else
throw new UnityAgentsException(
"You cannot perform training on this platform.");
#endif
}
/// <summary>
/// Close the communicator gracefully on both sides of the communication.
/// </summary>
public void Close()
{
# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
if (!m_IsOpen)
{
return;
}
try
{
m_Client.Exchange(WrapMessage(null, 400));
m_IsOpen = false;
}
catch
{
// ignored
}
#else
throw new UnityAgentsException(
"You cannot perform training on this platform.");
#endif
}
/// <summary>
/// Send a UnityOutput and receives a UnityInput.
/// </summary>
/// <returns>The next UnityInput.</returns>
/// <param name="unityOutput">The UnityOutput to be sent.</param>
public UnityInput Exchange(UnityOutput unityOutput)
{
# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
if (!m_IsOpen)
{
return null;
}
try
{
var message = m_Client.Exchange(WrapMessage(unityOutput, 200));
if (message.Header.Status == 200)
{
return message.UnityInput;
}
else
{
m_IsOpen = false;
return null;
}
}
catch
{
m_IsOpen = false;
return null;
}
#else
throw new UnityAgentsException(
"You cannot perform training on this platform.");
#endif
}
/// <summary>
/// Wraps the UnityOutput into a message with the appropriate status.
/// </summary>
/// <returns>The UnityMessage corresponding.</returns>
/// <param name="content">The UnityOutput to be wrapped.</param>
/// <param name="status">The status of the message.</param>
private static UnityMessage WrapMessage(UnityOutput content, int status)
{
return new UnityMessage
{
Header = new Header { Status = status },
UnityOutput = content
};
}
/// <summary>
/// When the Unity application quits, the communicator must be closed
/// </summary>
private void OnApplicationQuit()
{
Close();
}
#if UNITY_EDITOR
#if UNITY_2017_2_OR_NEWER
/// <summary>
/// When the editor exits, the communicator must be closed
/// </summary>
/// <param name="state">State.</param>
private void HandleOnPlayModeChanged(PlayModeStateChange state)
{
// This method is run whenever the playmode state is changed.
if (state == PlayModeStateChange.ExitingPlayMode)
{
Close();
}
}
#else
/// <summary>
/// When the editor exits, the communicator must be closed
/// </summary>
private void HandleOnPlayModeChanged()
{
// This method is run whenever the playmode state is changed.
if (!EditorApplication.isPlayingOrWillChangePlaymode)
{
Close();
}
}
#endif
#endif
}
}
using Google.Protobuf;
using System.Net.Sockets;
using UnityEngine;
using MLAgents.CommunicatorObjects;
using System.Threading.Tasks;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace MLAgents
{
public class SocketCommunicator : ICommunicator
{
private const float k_TimeOut = 10f;
private const int k_MessageLength = 12000;
byte[] m_MessageHolder = new byte[k_MessageLength];
int m_ComPort;
Socket m_Sender;
byte[] m_LengthHolder = new byte[4];
CommunicatorParameters m_CommunicatorParameters;
public SocketCommunicator(CommunicatorParameters communicatorParameters)
{
m_CommunicatorParameters = communicatorParameters;
}
/// <summary>
/// Initialize the communicator by sending the first UnityOutput and receiving the
/// first UnityInput. The second UnityInput is stored in the unityInput argument.
/// </summary>
/// <returns>The first Unity Input.</returns>
/// <param name="unityOutput">The first Unity Output.</param>
/// <param name="unityInput">The second Unity input.</param>
public UnityInput Initialize(UnityOutput unityOutput,
out UnityInput unityInput)
{
m_Sender = new Socket(
AddressFamily.InterNetwork,
SocketType.Stream,
ProtocolType.Tcp);
m_Sender.Connect("localhost", m_CommunicatorParameters.port);
var initializationInput =
UnityMessage.Parser.ParseFrom(Receive());
Send(WrapMessage(unityOutput, 200).ToByteArray());
unityInput = UnityMessage.Parser.ParseFrom(Receive()).UnityInput;
#if UNITY_EDITOR
#if UNITY_2017_2_OR_NEWER
EditorApplication.playModeStateChanged += HandleOnPlayModeChanged;
#else
EditorApplication.playmodeStateChanged += HandleOnPlayModeChanged;
#endif
#endif
return initializationInput.UnityInput;
}
/// <summary>
/// Uses the socket to receive a byte[] from External. Reassembles a message that was split
/// by External if it was too long.
/// </summary>
/// <returns>The byte[] sent by External.</returns>
byte[] Receive()
{
m_Sender.Receive(m_LengthHolder);
var totalLength = System.BitConverter.ToInt32(m_LengthHolder, 0);
var location = 0;
var result = new byte[totalLength];
while (location != totalLength)
{
var fragment = m_Sender.Receive(m_MessageHolder);
System.Buffer.BlockCopy(
m_MessageHolder, 0, result, location, fragment);
location += fragment;
}
return result;
}
/// <summary>
/// Send the specified input via socket to External. Split the message into smaller
/// parts if it is too long.
/// </summary>
/// <param name="input">The byte[] to be sent.</param>
void Send(byte[] input)
{
var newArray = new byte[input.Length + 4];
input.CopyTo(newArray, 4);
System.BitConverter.GetBytes(input.Length).CopyTo(newArray, 0);
m_Sender.Send(newArray);
}
/// <summary>
/// Close the communicator gracefully on both sides of the communication.
/// </summary>
public void Close()
{
Send(WrapMessage(null, 400).ToByteArray());
}
/// <summary>
/// Send a UnityOutput and receives a UnityInput.
/// </summary>
/// <returns>The next UnityInput.</returns>
/// <param name="unityOutput">The UnityOutput to be sent.</param>
public UnityInput Exchange(UnityOutput unityOutput)
{
Send(WrapMessage(unityOutput, 200).ToByteArray());
byte[] received = null;
var task = Task.Run(() => received = Receive());
if (!task.Wait(System.TimeSpan.FromSeconds(k_TimeOut)))
{
throw new UnityAgentsException(
"The communicator took too long to respond.");
}
var message = UnityMessage.Parser.ParseFrom(received);
if (message.Header.Status != 200)
{
return null;
}
return message.UnityInput;
}
/// <summary>
/// Wraps the UnityOutput into a message with the appropriate status.
/// </summary>
/// <returns>The UnityMessage corresponding.</returns>
/// <param name="content">The UnityOutput to be wrapped.</param>
/// <param name="status">The status of the message.</param>
private static UnityMessage WrapMessage(UnityOutput content, int status)
{
return new UnityMessage
{
Header = new Header { Status = status },
UnityOutput = content
};
}
/// <summary>
/// When the Unity application quits, the communicator must be closed
/// </summary>
private void OnApplicationQuit()
{
Close();
}
#if UNITY_EDITOR
#if UNITY_2017_2_OR_NEWER
/// <summary>
/// When the editor exits, the communicator must be closed
/// </summary>
/// <param name="state">State.</param>
private void HandleOnPlayModeChanged(PlayModeStateChange state)
{
// This method is run whenever the playmode state is changed.
if (state == PlayModeStateChange.ExitingPlayMode)
{
Close();
}
}
#else
/// <summary>
        /// When the editor exits play mode, the communicator must be closed
/// </summary>
private void HandleOnPlayModeChanged()
{
// This method is run whenever the playmode state is changed.
if (!EditorApplication.isPlayingOrWillChangePlaymode)
{
Close();
}
}
#endif
#endif
}
}
| 182 |
ml-agents | openai | C# | using System;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace MLAgents
{
public class Startup : MonoBehaviour
{
private const string k_SceneVariableName = "SCENE_NAME";
private void Awake()
{
var sceneName = Environment.GetEnvironmentVariable(k_SceneVariableName);
SwitchScene(sceneName);
}
private static void SwitchScene(string sceneName)
{
if (sceneName == null)
{
                throw new ArgumentException(
                    $"You didn't specify the {k_SceneVariableName} environment variable");
}
if (SceneUtility.GetBuildIndexByScenePath(sceneName) < 0)
{
                throw new ArgumentException(
                    $"The scene {sceneName} doesn't exist within your build.");
}
SceneManager.LoadSceneAsync(sceneName);
}
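        // Example launch (hypothetical scene and executable names, assuming this Startup
        // scene is the entry scene of the build):
        //   SCENE_NAME=3DBall ./environment_build.x86_64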
}
}
| 33 |
ml-agents | openai | C# | using System;
namespace MLAgents
{
/// Contains exceptions specific to ML-Agents.
[Serializable]
public class UnityAgentsException : Exception
{
        /// When a UnityAgentsException is thrown, the timeScale is set to 0.
/// The simulation will end since no steps will be taken.
public UnityAgentsException(string message) : base(message)
{
}
/// A constructor is needed for serialization when an exception propagates
/// from a remoting server to the client.
        protected UnityAgentsException(
            System.Runtime.Serialization.SerializationInfo info,
            System.Runtime.Serialization.StreamingContext context)
            : base(info, context)
        {
        }
}
}
| 24 |
ml-agents | openai | C# | using UnityEngine;
using System.Collections.Generic;
using MLAgents.InferenceBrain;
namespace MLAgents
{
public static class Utilities
{
/// <summary>
/// Converts a list of Texture2D into a TensorProxy.
/// </summary>
/// <param name="textures">
/// The list of textures to be put into the tensor.
        /// Note that the textures must have the same width and height.
/// </param>
/// <param name="tensorProxy">
/// TensorProxy to fill with Texture data.
/// </param>
/// <param name="grayScale">
/// If set to <c>true</c> the textures will be converted to grayscale before
/// being stored in the tensor.
/// </param>
public static void TextureToTensorProxy(
List<Texture2D> textures,
TensorProxy tensorProxy,
bool grayScale)
{
var numTextures = textures.Count;
var width = textures[0].width;
var height = textures[0].height;
var data = tensorProxy.data;
for (var t = 0; t < numTextures; t++)
{
var texturePixels = textures[t].GetPixels32();
for (var h = height - 1; h >= 0; h--)
{
for (var w = 0; w < width; w++)
{
var currentPixel = texturePixels[(height - h - 1) * width + w];
if (grayScale)
{
data[t, h, w, 0] =
(currentPixel.r + currentPixel.g + currentPixel.b) / 3f / 255.0f;
}
else
{
// For Color32, the r, g and b values are between 0 and 255.
data[t, h, w, 0] = currentPixel.r / 255.0f;
data[t, h, w, 1] = currentPixel.g / 255.0f;
data[t, h, w, 2] = currentPixel.b / 255.0f;
}
}
}
}
}
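        // The tensor is filled in NHWC order: data[texture, row, column, channel], using a
        // single channel for grayscale and three (r, g, b) channels otherwise. Tensor row 0
        // holds the top row of the texture, since GetPixels32() returns pixels bottom-up.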
/// <summary>
/// Calculates the cumulative sum of an integer array. The result array will be one element
/// larger than the input array since it has a padded 0 at the beginning.
/// If the input is [a, b, c], the result will be [0, a, a+b, a+b+c]
/// </summary>
/// <param name="input">
/// Input array whose elements will be cumulatively added
/// </param>
/// <returns> The cumulative sum of the input array.</returns>
public static int[] CumSum(int[] input)
{
var runningSum = 0;
var result = new int[input.Length + 1];
for (var actionIndex = 0; actionIndex < input.Length; actionIndex++)
{
runningSum += input[actionIndex];
result[actionIndex + 1] = runningSum;
}
return result;
}
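        // Example: CumSum(new[] { 2, 3, 1 }) returns { 0, 2, 5, 6 }, which is useful, for
        // instance, for turning per-branch counts into offsets into a flattened array.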
        /// <summary>
        /// Shifts list elements to the left by the specified amount (in-place).
        /// </summary>
        /// <param name="list">
        /// List whose elements will be shifted
        /// </param>
        /// <param name="shiftAmount">
        /// Amount to shift the elements to the left by
        /// </param>
public static void ShiftLeft<T>(List<T> list, int shiftAmount)
{
for (var i = shiftAmount; i < list.Count; i++)
{
list[i - shiftAmount] = list[i];
}
}
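        // Example: for a list { 1, 2, 3, 4 } and shiftAmount 2, the list becomes
        // { 3, 4, 3, 4 }; trailing elements are left in place rather than cleared.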
        /// <summary>
        /// Replaces target list elements with source list elements, starting at the specified
        /// position in the target list.
        /// </summary>
        /// <param name="dst">
        /// Target list, where the elements are written to
        /// </param>
        /// <param name="src">
        /// Source list, where the elements are copied from
        /// </param>
        /// <param name="start">
        /// Starting position in the target list to copy elements to
        /// </param>
public static void ReplaceRange<T>(List<T> dst, List<T> src, int start)
{
for (var i = 0; i < src.Count; i++)
{
dst[i + start] = src[i];
}
}
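        // Example: with dst = { 0, 0, 0, 0, 0 }, src = { 7, 8 } and start = 2, dst becomes
        // { 0, 0, 7, 8, 0 }. The destination must already contain at least
        // start + src.Count elements.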
        /// <summary>
        /// Adds elements to a list without extra temporary allocations (assuming they fit the
        /// pre-allocated capacity of the list). The built-in List&lt;T&gt;.AddRange()
        /// unfortunately allocates a temporary list to add items (even if the original array
        /// has sufficient capacity):
        /// https://stackoverflow.com/questions/2123161/listt-addrange-implementation-suboptimal
        /// Note: this implementation might be slow with a large source array.
        /// </summary>
        /// <param name="dst">
        /// Target list, where the elements are added to
        /// </param>
        /// <param name="src">
        /// Source array, where the elements are copied from
        /// </param>
// ReSharper disable once ParameterTypeCanBeEnumerable.Global
public static void AddRangeNoAlloc<T>(List<T> dst, T[] src)
{
// ReSharper disable once LoopCanBeConvertedToQuery
foreach (var item in src)
{
dst.Add(item);
}
}
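        // If dst lacks spare capacity the list still grows correctly, but that growth
        // allocates, so callers are expected to pre-size the list.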
}
}
| 141 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/agent_action_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/agent_action_proto.proto</summary>
public static partial class AgentActionProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/agent_action_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static AgentActionProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjttbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2Fj",
"dGlvbl9wcm90by5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaNm1sYWdl",
"bnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvY3VzdG9tX2FjdGlvbi5w",
"cm90byKcAQoQQWdlbnRBY3Rpb25Qcm90bxIWCg52ZWN0b3JfYWN0aW9ucxgB",
"IAMoAhIUCgx0ZXh0X2FjdGlvbnMYAiABKAkSEAoIbWVtb3JpZXMYAyADKAIS",
"DQoFdmFsdWUYBCABKAISOQoNY3VzdG9tX2FjdGlvbhgFIAEoCzIiLmNvbW11",
"bmljYXRvcl9vYmplY3RzLkN1c3RvbUFjdGlvbkIfqgIcTUxBZ2VudHMuQ29t",
"bXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomActionReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentActionProto), global::MLAgents.CommunicatorObjects.AgentActionProto.Parser, new[]{ "VectorActions", "TextActions", "Memories", "Value", "CustomAction" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class AgentActionProto : pb::IMessage<AgentActionProto> {
private static readonly pb::MessageParser<AgentActionProto> _parser = new pb::MessageParser<AgentActionProto>(() => new AgentActionProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<AgentActionProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.AgentActionProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentActionProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentActionProto(AgentActionProto other) : this() {
vectorActions_ = other.vectorActions_.Clone();
textActions_ = other.textActions_;
memories_ = other.memories_.Clone();
value_ = other.value_;
customAction_ = other.customAction_ != null ? other.customAction_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentActionProto Clone() {
return new AgentActionProto(this);
}
/// <summary>Field number for the "vector_actions" field.</summary>
public const int VectorActionsFieldNumber = 1;
private static readonly pb::FieldCodec<float> _repeated_vectorActions_codec
= pb::FieldCodec.ForFloat(10);
private readonly pbc::RepeatedField<float> vectorActions_ = new pbc::RepeatedField<float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<float> VectorActions {
get { return vectorActions_; }
}
/// <summary>Field number for the "text_actions" field.</summary>
public const int TextActionsFieldNumber = 2;
private string textActions_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string TextActions {
get { return textActions_; }
set {
textActions_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "memories" field.</summary>
public const int MemoriesFieldNumber = 3;
private static readonly pb::FieldCodec<float> _repeated_memories_codec
= pb::FieldCodec.ForFloat(26);
private readonly pbc::RepeatedField<float> memories_ = new pbc::RepeatedField<float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<float> Memories {
get { return memories_; }
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 4;
private float value_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public float Value {
get { return value_; }
set {
value_ = value;
}
}
/// <summary>Field number for the "custom_action" field.</summary>
public const int CustomActionFieldNumber = 5;
private global::MLAgents.CommunicatorObjects.CustomAction customAction_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.CustomAction CustomAction {
get { return customAction_; }
set {
customAction_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as AgentActionProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(AgentActionProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if(!vectorActions_.Equals(other.vectorActions_)) return false;
if (TextActions != other.TextActions) return false;
if(!memories_.Equals(other.memories_)) return false;
if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Value, other.Value)) return false;
if (!object.Equals(CustomAction, other.CustomAction)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= vectorActions_.GetHashCode();
if (TextActions.Length != 0) hash ^= TextActions.GetHashCode();
hash ^= memories_.GetHashCode();
if (Value != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Value);
if (customAction_ != null) hash ^= CustomAction.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
vectorActions_.WriteTo(output, _repeated_vectorActions_codec);
if (TextActions.Length != 0) {
output.WriteRawTag(18);
output.WriteString(TextActions);
}
memories_.WriteTo(output, _repeated_memories_codec);
if (Value != 0F) {
output.WriteRawTag(37);
output.WriteFloat(Value);
}
if (customAction_ != null) {
output.WriteRawTag(42);
output.WriteMessage(CustomAction);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += vectorActions_.CalculateSize(_repeated_vectorActions_codec);
if (TextActions.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(TextActions);
}
size += memories_.CalculateSize(_repeated_memories_codec);
if (Value != 0F) {
size += 1 + 4;
}
if (customAction_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(CustomAction);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(AgentActionProto other) {
if (other == null) {
return;
}
vectorActions_.Add(other.vectorActions_);
if (other.TextActions.Length != 0) {
TextActions = other.TextActions;
}
memories_.Add(other.memories_);
if (other.Value != 0F) {
Value = other.Value;
}
if (other.customAction_ != null) {
if (customAction_ == null) {
CustomAction = new global::MLAgents.CommunicatorObjects.CustomAction();
}
CustomAction.MergeFrom(other.CustomAction);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10:
case 13: {
vectorActions_.AddEntriesFrom(input, _repeated_vectorActions_codec);
break;
}
case 18: {
TextActions = input.ReadString();
break;
}
case 26:
case 29: {
memories_.AddEntriesFrom(input, _repeated_memories_codec);
break;
}
case 37: {
Value = input.ReadFloat();
break;
}
case 42: {
if (customAction_ == null) {
CustomAction = new global::MLAgents.CommunicatorObjects.CustomAction();
}
input.ReadMessage(CustomAction);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| 283 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/agent_info_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/agent_info_proto.proto</summary>
public static partial class AgentInfoProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/agent_info_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static AgentInfoProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2lu",
"Zm9fcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjttbGFnZW50",
"cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9vYnNlcnZhdGlv",
"bi5wcm90byLXAgoOQWdlbnRJbmZvUHJvdG8SIgoac3RhY2tlZF92ZWN0b3Jf",
"b2JzZXJ2YXRpb24YASADKAISGwoTdmlzdWFsX29ic2VydmF0aW9ucxgCIAMo",
"DBIYChB0ZXh0X29ic2VydmF0aW9uGAMgASgJEh0KFXN0b3JlZF92ZWN0b3Jf",
"YWN0aW9ucxgEIAMoAhIbChNzdG9yZWRfdGV4dF9hY3Rpb25zGAUgASgJEhAK",
"CG1lbW9yaWVzGAYgAygCEg4KBnJld2FyZBgHIAEoAhIMCgRkb25lGAggASgI",
"EhgKEG1heF9zdGVwX3JlYWNoZWQYCSABKAgSCgoCaWQYCiABKAUSEwoLYWN0",
"aW9uX21hc2sYCyADKAgSQwoSY3VzdG9tX29ic2VydmF0aW9uGAwgASgLMicu",
"Y29tbXVuaWNhdG9yX29iamVjdHMuQ3VzdG9tT2JzZXJ2YXRpb25CH6oCHE1M",
"QWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomObservationReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentInfoProto), global::MLAgents.CommunicatorObjects.AgentInfoProto.Parser, new[]{ "StackedVectorObservation", "VisualObservations", "TextObservation", "StoredVectorActions", "StoredTextActions", "Memories", "Reward", "Done", "MaxStepReached", "Id", "ActionMask", "CustomObservation" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class AgentInfoProto : pb::IMessage<AgentInfoProto> {
private static readonly pb::MessageParser<AgentInfoProto> _parser = new pb::MessageParser<AgentInfoProto>(() => new AgentInfoProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<AgentInfoProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.AgentInfoProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentInfoProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentInfoProto(AgentInfoProto other) : this() {
stackedVectorObservation_ = other.stackedVectorObservation_.Clone();
visualObservations_ = other.visualObservations_.Clone();
textObservation_ = other.textObservation_;
storedVectorActions_ = other.storedVectorActions_.Clone();
storedTextActions_ = other.storedTextActions_;
memories_ = other.memories_.Clone();
reward_ = other.reward_;
done_ = other.done_;
maxStepReached_ = other.maxStepReached_;
id_ = other.id_;
actionMask_ = other.actionMask_.Clone();
customObservation_ = other.customObservation_ != null ? other.customObservation_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentInfoProto Clone() {
return new AgentInfoProto(this);
}
/// <summary>Field number for the "stacked_vector_observation" field.</summary>
public const int StackedVectorObservationFieldNumber = 1;
private static readonly pb::FieldCodec<float> _repeated_stackedVectorObservation_codec
= pb::FieldCodec.ForFloat(10);
private readonly pbc::RepeatedField<float> stackedVectorObservation_ = new pbc::RepeatedField<float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<float> StackedVectorObservation {
get { return stackedVectorObservation_; }
}
/// <summary>Field number for the "visual_observations" field.</summary>
public const int VisualObservationsFieldNumber = 2;
private static readonly pb::FieldCodec<pb::ByteString> _repeated_visualObservations_codec
= pb::FieldCodec.ForBytes(18);
private readonly pbc::RepeatedField<pb::ByteString> visualObservations_ = new pbc::RepeatedField<pb::ByteString>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<pb::ByteString> VisualObservations {
get { return visualObservations_; }
}
/// <summary>Field number for the "text_observation" field.</summary>
public const int TextObservationFieldNumber = 3;
private string textObservation_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string TextObservation {
get { return textObservation_; }
set {
textObservation_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "stored_vector_actions" field.</summary>
public const int StoredVectorActionsFieldNumber = 4;
private static readonly pb::FieldCodec<float> _repeated_storedVectorActions_codec
= pb::FieldCodec.ForFloat(34);
private readonly pbc::RepeatedField<float> storedVectorActions_ = new pbc::RepeatedField<float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<float> StoredVectorActions {
get { return storedVectorActions_; }
}
/// <summary>Field number for the "stored_text_actions" field.</summary>
public const int StoredTextActionsFieldNumber = 5;
private string storedTextActions_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string StoredTextActions {
get { return storedTextActions_; }
set {
storedTextActions_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "memories" field.</summary>
public const int MemoriesFieldNumber = 6;
private static readonly pb::FieldCodec<float> _repeated_memories_codec
= pb::FieldCodec.ForFloat(50);
private readonly pbc::RepeatedField<float> memories_ = new pbc::RepeatedField<float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<float> Memories {
get { return memories_; }
}
/// <summary>Field number for the "reward" field.</summary>
public const int RewardFieldNumber = 7;
private float reward_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public float Reward {
get { return reward_; }
set {
reward_ = value;
}
}
/// <summary>Field number for the "done" field.</summary>
public const int DoneFieldNumber = 8;
private bool done_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Done {
get { return done_; }
set {
done_ = value;
}
}
/// <summary>Field number for the "max_step_reached" field.</summary>
public const int MaxStepReachedFieldNumber = 9;
private bool maxStepReached_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool MaxStepReached {
get { return maxStepReached_; }
set {
maxStepReached_ = value;
}
}
/// <summary>Field number for the "id" field.</summary>
public const int IdFieldNumber = 10;
private int id_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Id {
get { return id_; }
set {
id_ = value;
}
}
/// <summary>Field number for the "action_mask" field.</summary>
public const int ActionMaskFieldNumber = 11;
private static readonly pb::FieldCodec<bool> _repeated_actionMask_codec
= pb::FieldCodec.ForBool(90);
private readonly pbc::RepeatedField<bool> actionMask_ = new pbc::RepeatedField<bool>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<bool> ActionMask {
get { return actionMask_; }
}
/// <summary>Field number for the "custom_observation" field.</summary>
public const int CustomObservationFieldNumber = 12;
private global::MLAgents.CommunicatorObjects.CustomObservation customObservation_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.CustomObservation CustomObservation {
get { return customObservation_; }
set {
customObservation_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as AgentInfoProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(AgentInfoProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if(!stackedVectorObservation_.Equals(other.stackedVectorObservation_)) return false;
if(!visualObservations_.Equals(other.visualObservations_)) return false;
if (TextObservation != other.TextObservation) return false;
if(!storedVectorActions_.Equals(other.storedVectorActions_)) return false;
if (StoredTextActions != other.StoredTextActions) return false;
if(!memories_.Equals(other.memories_)) return false;
if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(Reward, other.Reward)) return false;
if (Done != other.Done) return false;
if (MaxStepReached != other.MaxStepReached) return false;
if (Id != other.Id) return false;
if(!actionMask_.Equals(other.actionMask_)) return false;
if (!object.Equals(CustomObservation, other.CustomObservation)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= stackedVectorObservation_.GetHashCode();
hash ^= visualObservations_.GetHashCode();
if (TextObservation.Length != 0) hash ^= TextObservation.GetHashCode();
hash ^= storedVectorActions_.GetHashCode();
if (StoredTextActions.Length != 0) hash ^= StoredTextActions.GetHashCode();
hash ^= memories_.GetHashCode();
if (Reward != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(Reward);
if (Done != false) hash ^= Done.GetHashCode();
if (MaxStepReached != false) hash ^= MaxStepReached.GetHashCode();
if (Id != 0) hash ^= Id.GetHashCode();
hash ^= actionMask_.GetHashCode();
if (customObservation_ != null) hash ^= CustomObservation.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
stackedVectorObservation_.WriteTo(output, _repeated_stackedVectorObservation_codec);
visualObservations_.WriteTo(output, _repeated_visualObservations_codec);
if (TextObservation.Length != 0) {
output.WriteRawTag(26);
output.WriteString(TextObservation);
}
storedVectorActions_.WriteTo(output, _repeated_storedVectorActions_codec);
if (StoredTextActions.Length != 0) {
output.WriteRawTag(42);
output.WriteString(StoredTextActions);
}
memories_.WriteTo(output, _repeated_memories_codec);
if (Reward != 0F) {
output.WriteRawTag(61);
output.WriteFloat(Reward);
}
if (Done != false) {
output.WriteRawTag(64);
output.WriteBool(Done);
}
if (MaxStepReached != false) {
output.WriteRawTag(72);
output.WriteBool(MaxStepReached);
}
if (Id != 0) {
output.WriteRawTag(80);
output.WriteInt32(Id);
}
actionMask_.WriteTo(output, _repeated_actionMask_codec);
if (customObservation_ != null) {
output.WriteRawTag(98);
output.WriteMessage(CustomObservation);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += stackedVectorObservation_.CalculateSize(_repeated_stackedVectorObservation_codec);
size += visualObservations_.CalculateSize(_repeated_visualObservations_codec);
if (TextObservation.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(TextObservation);
}
size += storedVectorActions_.CalculateSize(_repeated_storedVectorActions_codec);
if (StoredTextActions.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(StoredTextActions);
}
size += memories_.CalculateSize(_repeated_memories_codec);
if (Reward != 0F) {
size += 1 + 4;
}
if (Done != false) {
size += 1 + 1;
}
if (MaxStepReached != false) {
size += 1 + 1;
}
if (Id != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Id);
}
size += actionMask_.CalculateSize(_repeated_actionMask_codec);
if (customObservation_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(CustomObservation);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(AgentInfoProto other) {
if (other == null) {
return;
}
stackedVectorObservation_.Add(other.stackedVectorObservation_);
visualObservations_.Add(other.visualObservations_);
if (other.TextObservation.Length != 0) {
TextObservation = other.TextObservation;
}
storedVectorActions_.Add(other.storedVectorActions_);
if (other.StoredTextActions.Length != 0) {
StoredTextActions = other.StoredTextActions;
}
memories_.Add(other.memories_);
if (other.Reward != 0F) {
Reward = other.Reward;
}
if (other.Done != false) {
Done = other.Done;
}
if (other.MaxStepReached != false) {
MaxStepReached = other.MaxStepReached;
}
if (other.Id != 0) {
Id = other.Id;
}
actionMask_.Add(other.actionMask_);
if (other.customObservation_ != null) {
if (customObservation_ == null) {
CustomObservation = new global::MLAgents.CommunicatorObjects.CustomObservation();
}
CustomObservation.MergeFrom(other.CustomObservation);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10:
case 13: {
stackedVectorObservation_.AddEntriesFrom(input, _repeated_stackedVectorObservation_codec);
break;
}
case 18: {
visualObservations_.AddEntriesFrom(input, _repeated_visualObservations_codec);
break;
}
case 26: {
TextObservation = input.ReadString();
break;
}
case 34:
case 37: {
storedVectorActions_.AddEntriesFrom(input, _repeated_storedVectorActions_codec);
break;
}
case 42: {
StoredTextActions = input.ReadString();
break;
}
case 50:
case 53: {
memories_.AddEntriesFrom(input, _repeated_memories_codec);
break;
}
case 61: {
Reward = input.ReadFloat();
break;
}
case 64: {
Done = input.ReadBool();
break;
}
case 72: {
MaxStepReached = input.ReadBool();
break;
}
case 80: {
Id = input.ReadInt32();
break;
}
case 90:
case 88: {
actionMask_.AddEntriesFrom(input, _repeated_actionMask_codec);
break;
}
case 98: {
if (customObservation_ == null) {
CustomObservation = new global::MLAgents.CommunicatorObjects.CustomObservation();
}
input.ReadMessage(CustomObservation);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| 461 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/brain_parameters_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/brain_parameters_proto.proto</summary>
public static partial class BrainParametersProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/brain_parameters_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static BrainParametersProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"Cj9tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2JyYWluX3Bh",
"cmFtZXRlcnNfcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjlt",
"bGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3Jlc29sdXRpb25f",
"cHJvdG8ucHJvdG8aOW1sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVj",
"dHMvc3BhY2VfdHlwZV9wcm90by5wcm90byLUAgoUQnJhaW5QYXJhbWV0ZXJz",
"UHJvdG8SHwoXdmVjdG9yX29ic2VydmF0aW9uX3NpemUYASABKAUSJwofbnVt",
"X3N0YWNrZWRfdmVjdG9yX29ic2VydmF0aW9ucxgCIAEoBRIaChJ2ZWN0b3Jf",
"YWN0aW9uX3NpemUYAyADKAUSQQoSY2FtZXJhX3Jlc29sdXRpb25zGAQgAygL",
"MiUuY29tbXVuaWNhdG9yX29iamVjdHMuUmVzb2x1dGlvblByb3RvEiIKGnZl",
"Y3Rvcl9hY3Rpb25fZGVzY3JpcHRpb25zGAUgAygJEkYKGHZlY3Rvcl9hY3Rp",
"b25fc3BhY2VfdHlwZRgGIAEoDjIkLmNvbW11bmljYXRvcl9vYmplY3RzLlNw",
"YWNlVHlwZVByb3RvEhIKCmJyYWluX25hbWUYByABKAkSEwoLaXNfdHJhaW5p",
"bmcYCCABKAhCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy",
"b3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.ResolutionProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.SpaceTypeProtoReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.BrainParametersProto), global::MLAgents.CommunicatorObjects.BrainParametersProto.Parser, new[]{ "VectorObservationSize", "NumStackedVectorObservations", "VectorActionSize", "CameraResolutions", "VectorActionDescriptions", "VectorActionSpaceType", "BrainName", "IsTraining" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class BrainParametersProto : pb::IMessage<BrainParametersProto> {
private static readonly pb::MessageParser<BrainParametersProto> _parser = new pb::MessageParser<BrainParametersProto>(() => new BrainParametersProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<BrainParametersProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.BrainParametersProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public BrainParametersProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public BrainParametersProto(BrainParametersProto other) : this() {
vectorObservationSize_ = other.vectorObservationSize_;
numStackedVectorObservations_ = other.numStackedVectorObservations_;
vectorActionSize_ = other.vectorActionSize_.Clone();
cameraResolutions_ = other.cameraResolutions_.Clone();
vectorActionDescriptions_ = other.vectorActionDescriptions_.Clone();
vectorActionSpaceType_ = other.vectorActionSpaceType_;
brainName_ = other.brainName_;
isTraining_ = other.isTraining_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public BrainParametersProto Clone() {
return new BrainParametersProto(this);
}
/// <summary>Field number for the "vector_observation_size" field.</summary>
public const int VectorObservationSizeFieldNumber = 1;
private int vectorObservationSize_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int VectorObservationSize {
get { return vectorObservationSize_; }
set {
vectorObservationSize_ = value;
}
}
/// <summary>Field number for the "num_stacked_vector_observations" field.</summary>
public const int NumStackedVectorObservationsFieldNumber = 2;
private int numStackedVectorObservations_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int NumStackedVectorObservations {
get { return numStackedVectorObservations_; }
set {
numStackedVectorObservations_ = value;
}
}
/// <summary>Field number for the "vector_action_size" field.</summary>
public const int VectorActionSizeFieldNumber = 3;
private static readonly pb::FieldCodec<int> _repeated_vectorActionSize_codec
= pb::FieldCodec.ForInt32(26);
private readonly pbc::RepeatedField<int> vectorActionSize_ = new pbc::RepeatedField<int>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<int> VectorActionSize {
get { return vectorActionSize_; }
}
/// <summary>Field number for the "camera_resolutions" field.</summary>
public const int CameraResolutionsFieldNumber = 4;
private static readonly pb::FieldCodec<global::MLAgents.CommunicatorObjects.ResolutionProto> _repeated_cameraResolutions_codec
= pb::FieldCodec.ForMessage(34, global::MLAgents.CommunicatorObjects.ResolutionProto.Parser);
private readonly pbc::RepeatedField<global::MLAgents.CommunicatorObjects.ResolutionProto> cameraResolutions_ = new pbc::RepeatedField<global::MLAgents.CommunicatorObjects.ResolutionProto>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::MLAgents.CommunicatorObjects.ResolutionProto> CameraResolutions {
get { return cameraResolutions_; }
}
/// <summary>Field number for the "vector_action_descriptions" field.</summary>
public const int VectorActionDescriptionsFieldNumber = 5;
private static readonly pb::FieldCodec<string> _repeated_vectorActionDescriptions_codec
= pb::FieldCodec.ForString(42);
private readonly pbc::RepeatedField<string> vectorActionDescriptions_ = new pbc::RepeatedField<string>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<string> VectorActionDescriptions {
get { return vectorActionDescriptions_; }
}
/// <summary>Field number for the "vector_action_space_type" field.</summary>
public const int VectorActionSpaceTypeFieldNumber = 6;
private global::MLAgents.CommunicatorObjects.SpaceTypeProto vectorActionSpaceType_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.SpaceTypeProto VectorActionSpaceType {
get { return vectorActionSpaceType_; }
set {
vectorActionSpaceType_ = value;
}
}
/// <summary>Field number for the "brain_name" field.</summary>
public const int BrainNameFieldNumber = 7;
private string brainName_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string BrainName {
get { return brainName_; }
set {
brainName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "is_training" field.</summary>
public const int IsTrainingFieldNumber = 8;
private bool isTraining_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool IsTraining {
get { return isTraining_; }
set {
isTraining_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as BrainParametersProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(BrainParametersProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (VectorObservationSize != other.VectorObservationSize) return false;
if (NumStackedVectorObservations != other.NumStackedVectorObservations) return false;
if(!vectorActionSize_.Equals(other.vectorActionSize_)) return false;
if(!cameraResolutions_.Equals(other.cameraResolutions_)) return false;
if(!vectorActionDescriptions_.Equals(other.vectorActionDescriptions_)) return false;
if (VectorActionSpaceType != other.VectorActionSpaceType) return false;
if (BrainName != other.BrainName) return false;
if (IsTraining != other.IsTraining) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (VectorObservationSize != 0) hash ^= VectorObservationSize.GetHashCode();
if (NumStackedVectorObservations != 0) hash ^= NumStackedVectorObservations.GetHashCode();
hash ^= vectorActionSize_.GetHashCode();
hash ^= cameraResolutions_.GetHashCode();
hash ^= vectorActionDescriptions_.GetHashCode();
if (VectorActionSpaceType != 0) hash ^= VectorActionSpaceType.GetHashCode();
if (BrainName.Length != 0) hash ^= BrainName.GetHashCode();
if (IsTraining != false) hash ^= IsTraining.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (VectorObservationSize != 0) {
output.WriteRawTag(8);
output.WriteInt32(VectorObservationSize);
}
if (NumStackedVectorObservations != 0) {
output.WriteRawTag(16);
output.WriteInt32(NumStackedVectorObservations);
}
vectorActionSize_.WriteTo(output, _repeated_vectorActionSize_codec);
cameraResolutions_.WriteTo(output, _repeated_cameraResolutions_codec);
vectorActionDescriptions_.WriteTo(output, _repeated_vectorActionDescriptions_codec);
if (VectorActionSpaceType != 0) {
output.WriteRawTag(48);
output.WriteEnum((int) VectorActionSpaceType);
}
if (BrainName.Length != 0) {
output.WriteRawTag(58);
output.WriteString(BrainName);
}
if (IsTraining != false) {
output.WriteRawTag(64);
output.WriteBool(IsTraining);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (VectorObservationSize != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(VectorObservationSize);
}
if (NumStackedVectorObservations != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(NumStackedVectorObservations);
}
size += vectorActionSize_.CalculateSize(_repeated_vectorActionSize_codec);
size += cameraResolutions_.CalculateSize(_repeated_cameraResolutions_codec);
size += vectorActionDescriptions_.CalculateSize(_repeated_vectorActionDescriptions_codec);
if (VectorActionSpaceType != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) VectorActionSpaceType);
}
if (BrainName.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(BrainName);
}
if (IsTraining != false) {
size += 1 + 1;
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(BrainParametersProto other) {
if (other == null) {
return;
}
if (other.VectorObservationSize != 0) {
VectorObservationSize = other.VectorObservationSize;
}
if (other.NumStackedVectorObservations != 0) {
NumStackedVectorObservations = other.NumStackedVectorObservations;
}
vectorActionSize_.Add(other.vectorActionSize_);
cameraResolutions_.Add(other.cameraResolutions_);
vectorActionDescriptions_.Add(other.vectorActionDescriptions_);
if (other.VectorActionSpaceType != 0) {
VectorActionSpaceType = other.VectorActionSpaceType;
}
if (other.BrainName.Length != 0) {
BrainName = other.BrainName;
}
if (other.IsTraining != false) {
IsTraining = other.IsTraining;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
VectorObservationSize = input.ReadInt32();
break;
}
case 16: {
NumStackedVectorObservations = input.ReadInt32();
break;
}
case 26:
case 24: {
vectorActionSize_.AddEntriesFrom(input, _repeated_vectorActionSize_codec);
break;
}
case 34: {
cameraResolutions_.AddEntriesFrom(input, _repeated_cameraResolutions_codec);
break;
}
case 42: {
vectorActionDescriptions_.AddEntriesFrom(input, _repeated_vectorActionDescriptions_codec);
break;
}
case 48: {
VectorActionSpaceType = (global::MLAgents.CommunicatorObjects.SpaceTypeProto) input.ReadEnum();
break;
}
case 58: {
BrainName = input.ReadString();
break;
}
case 64: {
IsTraining = input.ReadBool();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| 358 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/command_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/command_proto.proto</summary>
public static partial class CommandProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/command_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static CommandProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2NvbW1hbmRf",
"cHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzKi0KDENvbW1hbmRQ",
"cm90bxIICgRTVEVQEAASCQoFUkVTRVQQARIICgRRVUlUEAJCH6oCHE1MQWdl",
"bnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(new[] {typeof(global::MLAgents.CommunicatorObjects.CommandProto), }, null));
}
#endregion
}
#region Enums
public enum CommandProto {
[pbr::OriginalName("STEP")] Step = 0,
[pbr::OriginalName("RESET")] Reset = 1,
[pbr::OriginalName("QUIT")] Quit = 2,
}
#endregion
}
#endregion Designer generated code
| 50 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/custom_action.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/custom_action.proto</summary>
public static partial class CustomActionReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/custom_action.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static CustomActionReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9h",
"Y3Rpb24ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzIg4KDEN1c3RvbUFj",
"dGlvbkIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomAction), global::MLAgents.CommunicatorObjects.CustomAction.Parser, null, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class CustomAction : pb::IMessage<CustomAction> {
private static readonly pb::MessageParser<CustomAction> _parser = new pb::MessageParser<CustomAction>(() => new CustomAction());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<CustomAction> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.CustomActionReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomAction() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomAction(CustomAction other) : this() {
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomAction Clone() {
return new CustomAction(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as CustomAction);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(CustomAction other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(CustomAction other) {
if (other == null) {
return;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
}
}
}
}
#endregion
}
#endregion Designer generated code
| 146 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/custom_observation.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/custom_observation.proto</summary>
public static partial class CustomObservationReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/custom_observation.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static CustomObservationReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjttbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9v",
"YnNlcnZhdGlvbi5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiEwoRQ3Vz",
"dG9tT2JzZXJ2YXRpb25CH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVj",
"dHNiBnByb3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomObservation), global::MLAgents.CommunicatorObjects.CustomObservation.Parser, null, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class CustomObservation : pb::IMessage<CustomObservation> {
private static readonly pb::MessageParser<CustomObservation> _parser = new pb::MessageParser<CustomObservation>(() => new CustomObservation());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<CustomObservation> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.CustomObservationReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomObservation() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomObservation(CustomObservation other) : this() {
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomObservation Clone() {
return new CustomObservation(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as CustomObservation);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(CustomObservation other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(CustomObservation other) {
if (other == null) {
return;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
}
}
}
}
#endregion
}
#endregion Designer generated code
| 147 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/custom_reset_parameters.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/custom_reset_parameters.proto</summary>
public static partial class CustomResetParametersReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/custom_reset_parameters.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static CustomResetParametersReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkBtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9y",
"ZXNldF9wYXJhbWV0ZXJzLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cyIX",
"ChVDdXN0b21SZXNldFBhcmFtZXRlcnNCH6oCHE1MQWdlbnRzLkNvbW11bmlj",
"YXRvck9iamVjdHNiBnByb3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomResetParameters), global::MLAgents.CommunicatorObjects.CustomResetParameters.Parser, null, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class CustomResetParameters : pb::IMessage<CustomResetParameters> {
private static readonly pb::MessageParser<CustomResetParameters> _parser = new pb::MessageParser<CustomResetParameters>(() => new CustomResetParameters());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<CustomResetParameters> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.CustomResetParametersReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomResetParameters() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomResetParameters(CustomResetParameters other) : this() {
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CustomResetParameters Clone() {
return new CustomResetParameters(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as CustomResetParameters);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(CustomResetParameters other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(CustomResetParameters other) {
if (other == null) {
return;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
}
}
}
}
#endregion
}
#endregion Designer generated code
| 147 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/demonstration_meta_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/demonstration_meta_proto.proto</summary>
public static partial class DemonstrationMetaProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/demonstration_meta_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static DemonstrationMetaProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkFtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2RlbW9uc3Ry",
"YXRpb25fbWV0YV9wcm90by5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMi",
"jQEKFkRlbW9uc3RyYXRpb25NZXRhUHJvdG8SEwoLYXBpX3ZlcnNpb24YASAB",
"KAUSGgoSZGVtb25zdHJhdGlvbl9uYW1lGAIgASgJEhQKDG51bWJlcl9zdGVw",
"cxgDIAEoBRIXCg9udW1iZXJfZXBpc29kZXMYBCABKAUSEwoLbWVhbl9yZXdh",
"cmQYBSABKAJCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy",
"b3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.DemonstrationMetaProto), global::MLAgents.CommunicatorObjects.DemonstrationMetaProto.Parser, new[]{ "ApiVersion", "DemonstrationName", "NumberSteps", "NumberEpisodes", "MeanReward" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class DemonstrationMetaProto : pb::IMessage<DemonstrationMetaProto> {
private static readonly pb::MessageParser<DemonstrationMetaProto> _parser = new pb::MessageParser<DemonstrationMetaProto>(() => new DemonstrationMetaProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<DemonstrationMetaProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.DemonstrationMetaProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public DemonstrationMetaProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public DemonstrationMetaProto(DemonstrationMetaProto other) : this() {
apiVersion_ = other.apiVersion_;
demonstrationName_ = other.demonstrationName_;
numberSteps_ = other.numberSteps_;
numberEpisodes_ = other.numberEpisodes_;
meanReward_ = other.meanReward_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public DemonstrationMetaProto Clone() {
return new DemonstrationMetaProto(this);
}
/// <summary>Field number for the "api_version" field.</summary>
public const int ApiVersionFieldNumber = 1;
private int apiVersion_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int ApiVersion {
get { return apiVersion_; }
set {
apiVersion_ = value;
}
}
/// <summary>Field number for the "demonstration_name" field.</summary>
public const int DemonstrationNameFieldNumber = 2;
private string demonstrationName_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string DemonstrationName {
get { return demonstrationName_; }
set {
demonstrationName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "number_steps" field.</summary>
public const int NumberStepsFieldNumber = 3;
private int numberSteps_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int NumberSteps {
get { return numberSteps_; }
set {
numberSteps_ = value;
}
}
/// <summary>Field number for the "number_episodes" field.</summary>
public const int NumberEpisodesFieldNumber = 4;
private int numberEpisodes_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int NumberEpisodes {
get { return numberEpisodes_; }
set {
numberEpisodes_ = value;
}
}
/// <summary>Field number for the "mean_reward" field.</summary>
public const int MeanRewardFieldNumber = 5;
private float meanReward_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public float MeanReward {
get { return meanReward_; }
set {
meanReward_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as DemonstrationMetaProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(DemonstrationMetaProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (ApiVersion != other.ApiVersion) return false;
if (DemonstrationName != other.DemonstrationName) return false;
if (NumberSteps != other.NumberSteps) return false;
if (NumberEpisodes != other.NumberEpisodes) return false;
if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(MeanReward, other.MeanReward)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (ApiVersion != 0) hash ^= ApiVersion.GetHashCode();
if (DemonstrationName.Length != 0) hash ^= DemonstrationName.GetHashCode();
if (NumberSteps != 0) hash ^= NumberSteps.GetHashCode();
if (NumberEpisodes != 0) hash ^= NumberEpisodes.GetHashCode();
if (MeanReward != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(MeanReward);
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (ApiVersion != 0) {
output.WriteRawTag(8);
output.WriteInt32(ApiVersion);
}
if (DemonstrationName.Length != 0) {
output.WriteRawTag(18);
output.WriteString(DemonstrationName);
}
if (NumberSteps != 0) {
output.WriteRawTag(24);
output.WriteInt32(NumberSteps);
}
if (NumberEpisodes != 0) {
output.WriteRawTag(32);
output.WriteInt32(NumberEpisodes);
}
if (MeanReward != 0F) {
output.WriteRawTag(45);
output.WriteFloat(MeanReward);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (ApiVersion != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(ApiVersion);
}
if (DemonstrationName.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(DemonstrationName);
}
if (NumberSteps != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(NumberSteps);
}
if (NumberEpisodes != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(NumberEpisodes);
}
if (MeanReward != 0F) {
size += 1 + 4;
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(DemonstrationMetaProto other) {
if (other == null) {
return;
}
if (other.ApiVersion != 0) {
ApiVersion = other.ApiVersion;
}
if (other.DemonstrationName.Length != 0) {
DemonstrationName = other.DemonstrationName;
}
if (other.NumberSteps != 0) {
NumberSteps = other.NumberSteps;
}
if (other.NumberEpisodes != 0) {
NumberEpisodes = other.NumberEpisodes;
}
if (other.MeanReward != 0F) {
MeanReward = other.MeanReward;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
ApiVersion = input.ReadInt32();
break;
}
case 18: {
DemonstrationName = input.ReadString();
break;
}
case 24: {
NumberSteps = input.ReadInt32();
break;
}
case 32: {
NumberEpisodes = input.ReadInt32();
break;
}
case 45: {
MeanReward = input.ReadFloat();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
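// --- Illustrative usage sketch (editor addition, not part of the generated file) ---
// Shows how DemonstrationMetaProto is typically populated and round-tripped through
// its wire form. The field values and the Examples namespace are hypothetical.
namespace MLAgents.CommunicatorObjects.Examples {
  using Google.Protobuf;

  public static class DemonstrationMetaExample {
    public static DemonstrationMetaProto RoundTrip() {
      var meta = new DemonstrationMetaProto {
        ApiVersion = 1,
        DemonstrationName = "ExpertPush",
        NumberSteps = 5000,
        NumberEpisodes = 20,
        MeanReward = 0.75f
      };
      // ToByteArray is the Google.Protobuf.MessageExtensions helper over WriteTo.
      byte[] bytes = meta.ToByteArray();
      // The static Parser reconstructs an Equals()-equivalent message from the bytes.
      return DemonstrationMetaProto.Parser.ParseFrom(bytes);
    }
  }
}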
| 290 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/engine_configuration_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/engine_configuration_proto.proto</summary>
public static partial class EngineConfigurationProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/engine_configuration_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static EngineConfigurationProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkNtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2VuZ2luZV9j",
"b25maWd1cmF0aW9uX3Byb3RvLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0",
"cyKVAQoYRW5naW5lQ29uZmlndXJhdGlvblByb3RvEg0KBXdpZHRoGAEgASgF",
"Eg4KBmhlaWdodBgCIAEoBRIVCg1xdWFsaXR5X2xldmVsGAMgASgFEhIKCnRp",
"bWVfc2NhbGUYBCABKAISGQoRdGFyZ2V0X2ZyYW1lX3JhdGUYBSABKAUSFAoM",
"c2hvd19tb25pdG9yGAYgASgIQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JP",
"YmplY3RzYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.EngineConfigurationProto), global::MLAgents.CommunicatorObjects.EngineConfigurationProto.Parser, new[]{ "Width", "Height", "QualityLevel", "TimeScale", "TargetFrameRate", "ShowMonitor" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class EngineConfigurationProto : pb::IMessage<EngineConfigurationProto> {
private static readonly pb::MessageParser<EngineConfigurationProto> _parser = new pb::MessageParser<EngineConfigurationProto>(() => new EngineConfigurationProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<EngineConfigurationProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.EngineConfigurationProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public EngineConfigurationProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public EngineConfigurationProto(EngineConfigurationProto other) : this() {
width_ = other.width_;
height_ = other.height_;
qualityLevel_ = other.qualityLevel_;
timeScale_ = other.timeScale_;
targetFrameRate_ = other.targetFrameRate_;
showMonitor_ = other.showMonitor_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public EngineConfigurationProto Clone() {
return new EngineConfigurationProto(this);
}
/// <summary>Field number for the "width" field.</summary>
public const int WidthFieldNumber = 1;
private int width_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Width {
get { return width_; }
set {
width_ = value;
}
}
/// <summary>Field number for the "height" field.</summary>
public const int HeightFieldNumber = 2;
private int height_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Height {
get { return height_; }
set {
height_ = value;
}
}
/// <summary>Field number for the "quality_level" field.</summary>
public const int QualityLevelFieldNumber = 3;
private int qualityLevel_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int QualityLevel {
get { return qualityLevel_; }
set {
qualityLevel_ = value;
}
}
/// <summary>Field number for the "time_scale" field.</summary>
public const int TimeScaleFieldNumber = 4;
private float timeScale_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public float TimeScale {
get { return timeScale_; }
set {
timeScale_ = value;
}
}
/// <summary>Field number for the "target_frame_rate" field.</summary>
public const int TargetFrameRateFieldNumber = 5;
private int targetFrameRate_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int TargetFrameRate {
get { return targetFrameRate_; }
set {
targetFrameRate_ = value;
}
}
/// <summary>Field number for the "show_monitor" field.</summary>
public const int ShowMonitorFieldNumber = 6;
private bool showMonitor_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool ShowMonitor {
get { return showMonitor_; }
set {
showMonitor_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as EngineConfigurationProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(EngineConfigurationProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Width != other.Width) return false;
if (Height != other.Height) return false;
if (QualityLevel != other.QualityLevel) return false;
if (!pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.Equals(TimeScale, other.TimeScale)) return false;
if (TargetFrameRate != other.TargetFrameRate) return false;
if (ShowMonitor != other.ShowMonitor) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Width != 0) hash ^= Width.GetHashCode();
if (Height != 0) hash ^= Height.GetHashCode();
if (QualityLevel != 0) hash ^= QualityLevel.GetHashCode();
if (TimeScale != 0F) hash ^= pbc::ProtobufEqualityComparers.BitwiseSingleEqualityComparer.GetHashCode(TimeScale);
if (TargetFrameRate != 0) hash ^= TargetFrameRate.GetHashCode();
if (ShowMonitor != false) hash ^= ShowMonitor.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Width != 0) {
output.WriteRawTag(8);
output.WriteInt32(Width);
}
if (Height != 0) {
output.WriteRawTag(16);
output.WriteInt32(Height);
}
if (QualityLevel != 0) {
output.WriteRawTag(24);
output.WriteInt32(QualityLevel);
}
if (TimeScale != 0F) {
output.WriteRawTag(37);
output.WriteFloat(TimeScale);
}
if (TargetFrameRate != 0) {
output.WriteRawTag(40);
output.WriteInt32(TargetFrameRate);
}
if (ShowMonitor != false) {
output.WriteRawTag(48);
output.WriteBool(ShowMonitor);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Width != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Width);
}
if (Height != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Height);
}
if (QualityLevel != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(QualityLevel);
}
if (TimeScale != 0F) {
size += 1 + 4;
}
if (TargetFrameRate != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(TargetFrameRate);
}
if (ShowMonitor != false) {
size += 1 + 1;
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(EngineConfigurationProto other) {
if (other == null) {
return;
}
if (other.Width != 0) {
Width = other.Width;
}
if (other.Height != 0) {
Height = other.Height;
}
if (other.QualityLevel != 0) {
QualityLevel = other.QualityLevel;
}
if (other.TimeScale != 0F) {
TimeScale = other.TimeScale;
}
if (other.TargetFrameRate != 0) {
TargetFrameRate = other.TargetFrameRate;
}
if (other.ShowMonitor != false) {
ShowMonitor = other.ShowMonitor;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
Width = input.ReadInt32();
break;
}
case 16: {
Height = input.ReadInt32();
break;
}
case 24: {
QualityLevel = input.ReadInt32();
break;
}
case 37: {
TimeScale = input.ReadFloat();
break;
}
case 40: {
TargetFrameRate = input.ReadInt32();
break;
}
case 48: {
ShowMonitor = input.ReadBool();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
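// --- Illustrative usage sketch (editor addition, not part of the generated file) ---
// Demonstrates that only fields with non-default values reach the wire: CalculateSize()
// and WriteTo() both skip zero ints, 0F floats, and false bools. The configuration
// values below are hypothetical.
namespace MLAgents.CommunicatorObjects.Examples {
  using System.IO;
  using Google.Protobuf;

  public static class EngineConfigurationExample {
    public static byte[] Encode() {
      var config = new EngineConfigurationProto {
        Width = 84,
        Height = 84,
        QualityLevel = 1,
        TimeScale = 20f,
        TargetFrameRate = 60
        // ShowMonitor is left at its default (false) and is omitted on the wire.
      };
      using (var stream = new MemoryStream(config.CalculateSize())) {
        var output = new CodedOutputStream(stream);
        config.WriteTo(output);
        output.Flush();
        return stream.ToArray();
      }
    }
  }
}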
| 318 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/environment_parameters_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/environment_parameters_proto.proto</summary>
public static partial class EnvironmentParametersProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/environment_parameters_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static EnvironmentParametersProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkVtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2Vudmlyb25t",
"ZW50X3BhcmFtZXRlcnNfcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmpl",
"Y3RzGkBtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3Rv",
"bV9yZXNldF9wYXJhbWV0ZXJzLnByb3RvIoMCChpFbnZpcm9ubWVudFBhcmFt",
"ZXRlcnNQcm90bxJfChBmbG9hdF9wYXJhbWV0ZXJzGAEgAygLMkUuY29tbXVu",
"aWNhdG9yX29iamVjdHMuRW52aXJvbm1lbnRQYXJhbWV0ZXJzUHJvdG8uRmxv",
"YXRQYXJhbWV0ZXJzRW50cnkSTAoXY3VzdG9tX3Jlc2V0X3BhcmFtZXRlcnMY",
"AiABKAsyKy5jb21tdW5pY2F0b3Jfb2JqZWN0cy5DdXN0b21SZXNldFBhcmFt",
"ZXRlcnMaNgoURmxvYXRQYXJhbWV0ZXJzRW50cnkSCwoDa2V5GAEgASgJEg0K",
"BXZhbHVlGAIgASgCOgI4AUIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2Jq",
"ZWN0c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomResetParametersReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.EnvironmentParametersProto), global::MLAgents.CommunicatorObjects.EnvironmentParametersProto.Parser, new[]{ "FloatParameters", "CustomResetParameters" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, })
}));
}
#endregion
}
#region Messages
public sealed partial class EnvironmentParametersProto : pb::IMessage<EnvironmentParametersProto> {
private static readonly pb::MessageParser<EnvironmentParametersProto> _parser = new pb::MessageParser<EnvironmentParametersProto>(() => new EnvironmentParametersProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<EnvironmentParametersProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.EnvironmentParametersProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public EnvironmentParametersProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public EnvironmentParametersProto(EnvironmentParametersProto other) : this() {
floatParameters_ = other.floatParameters_.Clone();
customResetParameters_ = other.customResetParameters_ != null ? other.customResetParameters_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public EnvironmentParametersProto Clone() {
return new EnvironmentParametersProto(this);
}
/// <summary>Field number for the "float_parameters" field.</summary>
public const int FloatParametersFieldNumber = 1;
private static readonly pbc::MapField<string, float>.Codec _map_floatParameters_codec
= new pbc::MapField<string, float>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForFloat(21), 10);
private readonly pbc::MapField<string, float> floatParameters_ = new pbc::MapField<string, float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<string, float> FloatParameters {
get { return floatParameters_; }
}
/// <summary>Field number for the "custom_reset_parameters" field.</summary>
public const int CustomResetParametersFieldNumber = 2;
private global::MLAgents.CommunicatorObjects.CustomResetParameters customResetParameters_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.CustomResetParameters CustomResetParameters {
get { return customResetParameters_; }
set {
customResetParameters_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as EnvironmentParametersProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(EnvironmentParametersProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!FloatParameters.Equals(other.FloatParameters)) return false;
if (!object.Equals(CustomResetParameters, other.CustomResetParameters)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= FloatParameters.GetHashCode();
if (customResetParameters_ != null) hash ^= CustomResetParameters.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
floatParameters_.WriteTo(output, _map_floatParameters_codec);
if (customResetParameters_ != null) {
output.WriteRawTag(18);
output.WriteMessage(CustomResetParameters);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += floatParameters_.CalculateSize(_map_floatParameters_codec);
if (customResetParameters_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(CustomResetParameters);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(EnvironmentParametersProto other) {
if (other == null) {
return;
}
floatParameters_.Add(other.floatParameters_);
if (other.customResetParameters_ != null) {
if (customResetParameters_ == null) {
CustomResetParameters = new global::MLAgents.CommunicatorObjects.CustomResetParameters();
}
CustomResetParameters.MergeFrom(other.CustomResetParameters);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
floatParameters_.AddEntriesFrom(input, _map_floatParameters_codec);
break;
}
case 18: {
if (customResetParameters_ == null) {
CustomResetParameters = new global::MLAgents.CommunicatorObjects.CustomResetParameters();
}
input.ReadMessage(CustomResetParameters);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
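// --- Illustrative usage sketch (editor addition, not part of the generated file) ---
// FloatParameters is a read-only MapField property, so entries are added through the
// collection rather than by assigning a new map. The parameter names are hypothetical.
namespace MLAgents.CommunicatorObjects.Examples {
  public static class EnvironmentParametersExample {
    public static EnvironmentParametersProto Build() {
      var parameters = new EnvironmentParametersProto();
      parameters.FloatParameters["gravity"] = 9.81f;
      parameters.FloatParameters["mass"] = 1.0f;
      // Message-typed fields default to null; assign one explicitly when needed.
      parameters.CustomResetParameters = new CustomResetParameters();
      return parameters;
    }
  }
}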
| 208 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/header.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/header.proto</summary>
public static partial class HeaderReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/header.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static HeaderReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"Ci9tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2hlYWRlci5w",
"cm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiKQoGSGVhZGVyEg4KBnN0YXR1",
"cxgBIAEoBRIPCgdtZXNzYWdlGAIgASgJQh+qAhxNTEFnZW50cy5Db21tdW5p",
"Y2F0b3JPYmplY3RzYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.Header), global::MLAgents.CommunicatorObjects.Header.Parser, new[]{ "Status", "Message" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class Header : pb::IMessage<Header> {
private static readonly pb::MessageParser<Header> _parser = new pb::MessageParser<Header>(() => new Header());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Header> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.HeaderReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Header() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Header(Header other) : this() {
status_ = other.status_;
message_ = other.message_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Header Clone() {
return new Header(this);
}
/// <summary>Field number for the "status" field.</summary>
public const int StatusFieldNumber = 1;
private int status_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Status {
get { return status_; }
set {
status_ = value;
}
}
/// <summary>Field number for the "message" field.</summary>
public const int MessageFieldNumber = 2;
private string message_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Message {
get { return message_; }
set {
message_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as Header);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Header other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Status != other.Status) return false;
if (Message != other.Message) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Status != 0) hash ^= Status.GetHashCode();
if (Message.Length != 0) hash ^= Message.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Status != 0) {
output.WriteRawTag(8);
output.WriteInt32(Status);
}
if (Message.Length != 0) {
output.WriteRawTag(18);
output.WriteString(Message);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Status != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Status);
}
if (Message.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Message);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Header other) {
if (other == null) {
return;
}
if (other.Status != 0) {
Status = other.Status;
}
if (other.Message.Length != 0) {
Message = other.Message;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
Status = input.ReadInt32();
break;
}
case 18: {
Message = input.ReadString();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
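// --- Illustrative usage sketch (editor addition, not part of the generated file) ---
// Shows proto3 merge semantics on Header: MergeFrom copies only fields that hold a
// non-default value on the source message. The status codes are hypothetical.
namespace MLAgents.CommunicatorObjects.Examples {
  public static class HeaderExample {
    public static Header Merge() {
      var defaults = new Header { Status = 200, Message = "ok" };
      var overrides = new Header { Message = "shutting down" };
      // Status stays 200 because overrides.Status is 0 (the default);
      // Message is replaced because overrides.Message is non-empty.
      var merged = defaults.Clone();
      merged.MergeFrom(overrides);
      return merged;
    }
  }
}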
| 203 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/resolution_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/resolution_proto.proto</summary>
public static partial class ResolutionProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/resolution_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static ResolutionProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3Jlc29sdXRp",
"b25fcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzIkQKD1Jlc29s",
"dXRpb25Qcm90bxINCgV3aWR0aBgBIAEoBRIOCgZoZWlnaHQYAiABKAUSEgoK",
"Z3JheV9zY2FsZRgDIAEoCEIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2Jq",
"ZWN0c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.ResolutionProto), global::MLAgents.CommunicatorObjects.ResolutionProto.Parser, new[]{ "Width", "Height", "GrayScale" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class ResolutionProto : pb::IMessage<ResolutionProto> {
private static readonly pb::MessageParser<ResolutionProto> _parser = new pb::MessageParser<ResolutionProto>(() => new ResolutionProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<ResolutionProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.ResolutionProtoReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ResolutionProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ResolutionProto(ResolutionProto other) : this() {
width_ = other.width_;
height_ = other.height_;
grayScale_ = other.grayScale_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ResolutionProto Clone() {
return new ResolutionProto(this);
}
/// <summary>Field number for the "width" field.</summary>
public const int WidthFieldNumber = 1;
private int width_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Width {
get { return width_; }
set {
width_ = value;
}
}
/// <summary>Field number for the "height" field.</summary>
public const int HeightFieldNumber = 2;
private int height_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Height {
get { return height_; }
set {
height_ = value;
}
}
/// <summary>Field number for the "gray_scale" field.</summary>
public const int GrayScaleFieldNumber = 3;
private bool grayScale_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool GrayScale {
get { return grayScale_; }
set {
grayScale_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as ResolutionProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(ResolutionProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Width != other.Width) return false;
if (Height != other.Height) return false;
if (GrayScale != other.GrayScale) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Width != 0) hash ^= Width.GetHashCode();
if (Height != 0) hash ^= Height.GetHashCode();
if (GrayScale != false) hash ^= GrayScale.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Width != 0) {
output.WriteRawTag(8);
output.WriteInt32(Width);
}
if (Height != 0) {
output.WriteRawTag(16);
output.WriteInt32(Height);
}
if (GrayScale != false) {
output.WriteRawTag(24);
output.WriteBool(GrayScale);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Width != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Width);
}
if (Height != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Height);
}
if (GrayScale != false) {
size += 1 + 1;
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(ResolutionProto other) {
if (other == null) {
return;
}
if (other.Width != 0) {
Width = other.Width;
}
if (other.Height != 0) {
Height = other.Height;
}
if (other.GrayScale != false) {
GrayScale = other.GrayScale;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
Width = input.ReadInt32();
break;
}
case 16: {
Height = input.ReadInt32();
break;
}
case 24: {
GrayScale = input.ReadBool();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| 232 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/space_type_proto.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/space_type_proto.proto</summary>
public static partial class SpaceTypeProtoReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/space_type_proto.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static SpaceTypeProtoReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3NwYWNlX3R5",
"cGVfcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjltbGFnZW50",
"cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3Jlc29sdXRpb25fcHJvdG8u",
"cHJvdG8qLgoOU3BhY2VUeXBlUHJvdG8SDAoIZGlzY3JldGUQABIOCgpjb250",
"aW51b3VzEAFCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy",
"b3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.ResolutionProtoReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(new[] {typeof(global::MLAgents.CommunicatorObjects.SpaceTypeProto), }, null));
}
#endregion
}
#region Enums
public enum SpaceTypeProto {
[pbr::OriginalName("discrete")] Discrete = 0,
[pbr::OriginalName("continuous")] Continuous = 1,
}
#endregion
}
#endregion Designer generated code
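// --- Illustrative usage sketch (editor addition, not part of the generated file) ---
// SpaceTypeProto is a plain C# enum whose members map onto the proto values
// ("discrete" = 0, "continuous" = 1), so casting to and from int is the usual way to
// convert between wire values and the enum. The helper name is hypothetical.
namespace MLAgents.CommunicatorObjects.Examples {
  public static class SpaceTypeExample {
    public static bool IsContinuous(int wireValue) {
      return (SpaceTypeProto)wireValue == SpaceTypeProto.Continuous;
    }
  }
}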
| 51 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_input.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_input.proto</summary>
public static partial class UnityInputReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_input.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityInputReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjRtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X2lu",
"cHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cxo3bWxhZ2VudHMvZW52",
"cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9ybF9pbnB1dC5wcm90bxpG",
"bWxhZ2VudHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9ybF9p",
"bml0aWFsaXphdGlvbl9pbnB1dC5wcm90byKVAQoKVW5pdHlJbnB1dBI0Cghy",
"bF9pbnB1dBgBIAEoCzIiLmNvbW11bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJ",
"bnB1dBJRChdybF9pbml0aWFsaXphdGlvbl9pbnB1dBgCIAEoCzIwLmNvbW11",
"bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJbml0aWFsaXphdGlvbklucHV0Qh+q",
"AhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityRlInputReflection.Descriptor, global::MLAgents.CommunicatorObjects.UnityRlInitializationInputReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityInput), global::MLAgents.CommunicatorObjects.UnityInput.Parser, new[]{ "RlInput", "RlInitializationInput" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class UnityInput : pb::IMessage<UnityInput> {
private static readonly pb::MessageParser<UnityInput> _parser = new pb::MessageParser<UnityInput>(() => new UnityInput());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityInput> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityInputReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityInput() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityInput(UnityInput other) : this() {
rlInput_ = other.rlInput_ != null ? other.rlInput_.Clone() : null;
rlInitializationInput_ = other.rlInitializationInput_ != null ? other.rlInitializationInput_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityInput Clone() {
return new UnityInput(this);
}
/// <summary>Field number for the "rl_input" field.</summary>
public const int RlInputFieldNumber = 1;
private global::MLAgents.CommunicatorObjects.UnityRLInput rlInput_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.UnityRLInput RlInput {
get { return rlInput_; }
set {
rlInput_ = value;
}
}
/// <summary>Field number for the "rl_initialization_input" field.</summary>
public const int RlInitializationInputFieldNumber = 2;
private global::MLAgents.CommunicatorObjects.UnityRLInitializationInput rlInitializationInput_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.UnityRLInitializationInput RlInitializationInput {
get { return rlInitializationInput_; }
set {
rlInitializationInput_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityInput);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityInput other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(RlInput, other.RlInput)) return false;
if (!object.Equals(RlInitializationInput, other.RlInitializationInput)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (rlInput_ != null) hash ^= RlInput.GetHashCode();
if (rlInitializationInput_ != null) hash ^= RlInitializationInput.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (rlInput_ != null) {
output.WriteRawTag(10);
output.WriteMessage(RlInput);
}
if (rlInitializationInput_ != null) {
output.WriteRawTag(18);
output.WriteMessage(RlInitializationInput);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (rlInput_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(RlInput);
}
if (rlInitializationInput_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(RlInitializationInput);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityInput other) {
if (other == null) {
return;
}
if (other.rlInput_ != null) {
if (rlInput_ == null) {
RlInput = new global::MLAgents.CommunicatorObjects.UnityRLInput();
}
RlInput.MergeFrom(other.RlInput);
}
if (other.rlInitializationInput_ != null) {
if (rlInitializationInput_ == null) {
RlInitializationInput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInput();
}
RlInitializationInput.MergeFrom(other.RlInitializationInput);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
if (rlInput_ == null) {
RlInput = new global::MLAgents.CommunicatorObjects.UnityRLInput();
}
input.ReadMessage(RlInput);
break;
}
case 18: {
if (rlInitializationInput_ == null) {
RlInitializationInput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInput();
}
input.ReadMessage(RlInitializationInput);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| 220 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_message.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_message.proto</summary>
public static partial class UnityMessageReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_message.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityMessageReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X21l",
"c3NhZ2UucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjVtbGFnZW50cy9l",
"bnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X291dHB1dC5wcm90bxo0",
"bWxhZ2VudHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9pbnB1",
"dC5wcm90bxovbWxhZ2VudHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9o",
"ZWFkZXIucHJvdG8irAEKDFVuaXR5TWVzc2FnZRIsCgZoZWFkZXIYASABKAsy",
"HC5jb21tdW5pY2F0b3Jfb2JqZWN0cy5IZWFkZXISNwoMdW5pdHlfb3V0cHV0",
"GAIgASgLMiEuY29tbXVuaWNhdG9yX29iamVjdHMuVW5pdHlPdXRwdXQSNQoL",
"dW5pdHlfaW5wdXQYAyABKAsyIC5jb21tdW5pY2F0b3Jfb2JqZWN0cy5Vbml0",
"eUlucHV0Qh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90",
"bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityOutputReflection.Descriptor, global::MLAgents.CommunicatorObjects.UnityInputReflection.Descriptor, global::MLAgents.CommunicatorObjects.HeaderReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityMessage), global::MLAgents.CommunicatorObjects.UnityMessage.Parser, new[]{ "Header", "UnityOutput", "UnityInput" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class UnityMessage : pb::IMessage<UnityMessage> {
private static readonly pb::MessageParser<UnityMessage> _parser = new pb::MessageParser<UnityMessage>(() => new UnityMessage());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityMessage> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityMessageReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityMessage() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityMessage(UnityMessage other) : this() {
header_ = other.header_ != null ? other.header_.Clone() : null;
unityOutput_ = other.unityOutput_ != null ? other.unityOutput_.Clone() : null;
unityInput_ = other.unityInput_ != null ? other.unityInput_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityMessage Clone() {
return new UnityMessage(this);
}
/// <summary>Field number for the "header" field.</summary>
public const int HeaderFieldNumber = 1;
private global::MLAgents.CommunicatorObjects.Header header_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.Header Header {
get { return header_; }
set {
header_ = value;
}
}
/// <summary>Field number for the "unity_output" field.</summary>
public const int UnityOutputFieldNumber = 2;
private global::MLAgents.CommunicatorObjects.UnityOutput unityOutput_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.UnityOutput UnityOutput {
get { return unityOutput_; }
set {
unityOutput_ = value;
}
}
/// <summary>Field number for the "unity_input" field.</summary>
public const int UnityInputFieldNumber = 3;
private global::MLAgents.CommunicatorObjects.UnityInput unityInput_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.UnityInput UnityInput {
get { return unityInput_; }
set {
unityInput_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityMessage);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityMessage other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(Header, other.Header)) return false;
if (!object.Equals(UnityOutput, other.UnityOutput)) return false;
if (!object.Equals(UnityInput, other.UnityInput)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (header_ != null) hash ^= Header.GetHashCode();
if (unityOutput_ != null) hash ^= UnityOutput.GetHashCode();
if (unityInput_ != null) hash ^= UnityInput.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (header_ != null) {
output.WriteRawTag(10);
output.WriteMessage(Header);
}
if (unityOutput_ != null) {
output.WriteRawTag(18);
output.WriteMessage(UnityOutput);
}
if (unityInput_ != null) {
output.WriteRawTag(26);
output.WriteMessage(UnityInput);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (header_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Header);
}
if (unityOutput_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(UnityOutput);
}
if (unityInput_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(UnityInput);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityMessage other) {
if (other == null) {
return;
}
if (other.header_ != null) {
if (header_ == null) {
Header = new global::MLAgents.CommunicatorObjects.Header();
}
Header.MergeFrom(other.Header);
}
if (other.unityOutput_ != null) {
if (unityOutput_ == null) {
UnityOutput = new global::MLAgents.CommunicatorObjects.UnityOutput();
}
UnityOutput.MergeFrom(other.UnityOutput);
}
if (other.unityInput_ != null) {
if (unityInput_ == null) {
UnityInput = new global::MLAgents.CommunicatorObjects.UnityInput();
}
UnityInput.MergeFrom(other.UnityInput);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
if (header_ == null) {
Header = new global::MLAgents.CommunicatorObjects.Header();
}
input.ReadMessage(Header);
break;
}
case 18: {
if (unityOutput_ == null) {
UnityOutput = new global::MLAgents.CommunicatorObjects.UnityOutput();
}
input.ReadMessage(UnityOutput);
break;
}
case 26: {
if (unityInput_ == null) {
UnityInput = new global::MLAgents.CommunicatorObjects.UnityInput();
}
input.ReadMessage(UnityInput);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
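// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file): a byte-level round trip of a
// UnityMessage using only the generated members above plus standard Google.Protobuf
// helpers (MessageExtensions.ToByteArray, Parser.ParseFrom). Field values are left at
// their defaults for brevity; the example namespace and class name are hypothetical.
namespace MLAgents.Examples {
  public static class UnityMessageRoundTripExample {
    public static global::MLAgents.CommunicatorObjects.UnityMessage RoundTrip() {
      var message = new global::MLAgents.CommunicatorObjects.UnityMessage {
        Header = new global::MLAgents.CommunicatorObjects.Header(),
        UnityOutput = new global::MLAgents.CommunicatorObjects.UnityOutput()
      };
      // Serialization is driven by the WriteTo/CalculateSize members defined above.
      var bytes = global::Google.Protobuf.MessageExtensions.ToByteArray(message);
      // Parsing is driven by MergeFrom(CodedInputStream) via the generated Parser.
      return global::MLAgents.CommunicatorObjects.UnityMessage.Parser.ParseFrom(bytes);
    }
  }
}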
| 256 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_output.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_output.proto</summary>
public static partial class UnityOutputReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_output.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityOutputReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjVtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X291",
"dHB1dC5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaOG1sYWdlbnRzL2Vu",
"dnMvY29tbXVuaWNhdG9yX29iamVjdHMvdW5pdHlfcmxfb3V0cHV0LnByb3Rv",
"GkdtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js",
"X2luaXRpYWxpemF0aW9uX291dHB1dC5wcm90byKaAQoLVW5pdHlPdXRwdXQS",
"NgoJcmxfb3V0cHV0GAEgASgLMiMuY29tbXVuaWNhdG9yX29iamVjdHMuVW5p",
"dHlSTE91dHB1dBJTChhybF9pbml0aWFsaXphdGlvbl9vdXRwdXQYAiABKAsy",
"MS5jb21tdW5pY2F0b3Jfb2JqZWN0cy5Vbml0eVJMSW5pdGlhbGl6YXRpb25P",
"dXRwdXRCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3Rv",
"Mw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityRlOutputReflection.Descriptor, global::MLAgents.CommunicatorObjects.UnityRlInitializationOutputReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityOutput), global::MLAgents.CommunicatorObjects.UnityOutput.Parser, new[]{ "RlOutput", "RlInitializationOutput" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class UnityOutput : pb::IMessage<UnityOutput> {
private static readonly pb::MessageParser<UnityOutput> _parser = new pb::MessageParser<UnityOutput>(() => new UnityOutput());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityOutput> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityOutputReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityOutput() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityOutput(UnityOutput other) : this() {
rlOutput_ = other.rlOutput_ != null ? other.rlOutput_.Clone() : null;
rlInitializationOutput_ = other.rlInitializationOutput_ != null ? other.rlInitializationOutput_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityOutput Clone() {
return new UnityOutput(this);
}
/// <summary>Field number for the "rl_output" field.</summary>
public const int RlOutputFieldNumber = 1;
private global::MLAgents.CommunicatorObjects.UnityRLOutput rlOutput_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.UnityRLOutput RlOutput {
get { return rlOutput_; }
set {
rlOutput_ = value;
}
}
/// <summary>Field number for the "rl_initialization_output" field.</summary>
public const int RlInitializationOutputFieldNumber = 2;
private global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput rlInitializationOutput_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput RlInitializationOutput {
get { return rlInitializationOutput_; }
set {
rlInitializationOutput_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityOutput);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityOutput other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(RlOutput, other.RlOutput)) return false;
if (!object.Equals(RlInitializationOutput, other.RlInitializationOutput)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (rlOutput_ != null) hash ^= RlOutput.GetHashCode();
if (rlInitializationOutput_ != null) hash ^= RlInitializationOutput.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (rlOutput_ != null) {
output.WriteRawTag(10);
output.WriteMessage(RlOutput);
}
if (rlInitializationOutput_ != null) {
output.WriteRawTag(18);
output.WriteMessage(RlInitializationOutput);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (rlOutput_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(RlOutput);
}
if (rlInitializationOutput_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(RlInitializationOutput);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityOutput other) {
if (other == null) {
return;
}
if (other.rlOutput_ != null) {
if (rlOutput_ == null) {
RlOutput = new global::MLAgents.CommunicatorObjects.UnityRLOutput();
}
RlOutput.MergeFrom(other.RlOutput);
}
if (other.rlInitializationOutput_ != null) {
if (rlInitializationOutput_ == null) {
RlInitializationOutput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput();
}
RlInitializationOutput.MergeFrom(other.RlInitializationOutput);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
if (rlOutput_ == null) {
RlOutput = new global::MLAgents.CommunicatorObjects.UnityRLOutput();
}
input.ReadMessage(RlOutput);
break;
}
case 18: {
if (rlInitializationOutput_ == null) {
RlInitializationOutput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput();
}
input.ReadMessage(RlInitializationOutput);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
| 221 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_rl_initialization_input.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_rl_initialization_input.proto</summary>
public static partial class UnityRlInitializationInputReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_rl_initialization_input.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityRlInitializationInputReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js",
"X2luaXRpYWxpemF0aW9uX2lucHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2Jq",
"ZWN0cyIqChpVbml0eVJMSW5pdGlhbGl6YXRpb25JbnB1dBIMCgRzZWVkGAEg",
"ASgFQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInitializationInput), global::MLAgents.CommunicatorObjects.UnityRLInitializationInput.Parser, new[]{ "Seed" }, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class UnityRLInitializationInput : pb::IMessage<UnityRLInitializationInput> {
private static readonly pb::MessageParser<UnityRLInitializationInput> _parser = new pb::MessageParser<UnityRLInitializationInput>(() => new UnityRLInitializationInput());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityRLInitializationInput> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityRlInitializationInputReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInitializationInput() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInitializationInput(UnityRLInitializationInput other) : this() {
seed_ = other.seed_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInitializationInput Clone() {
return new UnityRLInitializationInput(this);
}
/// <summary>Field number for the "seed" field.</summary>
public const int SeedFieldNumber = 1;
private int seed_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int Seed {
get { return seed_; }
set {
seed_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityRLInitializationInput);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityRLInitializationInput other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Seed != other.Seed) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Seed != 0) hash ^= Seed.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Seed != 0) {
output.WriteRawTag(8);
output.WriteInt32(Seed);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Seed != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(Seed);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityRLInitializationInput other) {
if (other == null) {
return;
}
if (other.Seed != 0) {
Seed = other.Seed;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
Seed = input.ReadInt32();
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
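// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file): proto3 default-value semantics
// for the Seed field above. A zero Seed is never written (see the guards in WriteTo and
// CalculateSize), so the serialized payload is empty; a non-zero Seed costs the one-byte
// tag plus the varint. The example namespace and class name are hypothetical.
namespace MLAgents.Examples {
  public static class SeedSerializationExample {
    public static void Sizes(out int emptySize, out int seededSize) {
      var unseeded = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInput();            // Seed == 0
      var seeded = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInput { Seed = 42 };
      emptySize = unseeded.CalculateSize();   // 0 bytes: default values are omitted on the wire
      seededSize = seeded.CalculateSize();    // 2 bytes: tag (0x08) + varint(42)
    }
  }
}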
| 175 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_rl_initialization_output.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_rl_initialization_output.proto</summary>
public static partial class UnityRlInitializationOutputReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_rl_initialization_output.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityRlInitializationOutputReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CkdtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js",
"X2luaXRpYWxpemF0aW9uX291dHB1dC5wcm90bxIUY29tbXVuaWNhdG9yX29i",
"amVjdHMaP21sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvYnJh",
"aW5fcGFyYW1ldGVyc19wcm90by5wcm90bxpFbWxhZ2VudHMvZW52cy9jb21t",
"dW5pY2F0b3Jfb2JqZWN0cy9lbnZpcm9ubWVudF9wYXJhbWV0ZXJzX3Byb3Rv",
"LnByb3RvIuYBChtVbml0eVJMSW5pdGlhbGl6YXRpb25PdXRwdXQSDAoEbmFt",
"ZRgBIAEoCRIPCgd2ZXJzaW9uGAIgASgJEhAKCGxvZ19wYXRoGAMgASgJEkQK",
"EGJyYWluX3BhcmFtZXRlcnMYBSADKAsyKi5jb21tdW5pY2F0b3Jfb2JqZWN0",
"cy5CcmFpblBhcmFtZXRlcnNQcm90bxJQChZlbnZpcm9ubWVudF9wYXJhbWV0",
"ZXJzGAYgASgLMjAuY29tbXVuaWNhdG9yX29iamVjdHMuRW52aXJvbm1lbnRQ",
"YXJhbWV0ZXJzUHJvdG9CH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVj",
"dHNiBnByb3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.BrainParametersProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.EnvironmentParametersProtoReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput), global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput.Parser, new[]{ "Name", "Version", "LogPath", "BrainParameters", "EnvironmentParameters" }, null, null, null)
}));
}
#endregion
}
#region Messages
/// <summary>
/// The request message containing the academy's parameters.
/// </summary>
public sealed partial class UnityRLInitializationOutput : pb::IMessage<UnityRLInitializationOutput> {
private static readonly pb::MessageParser<UnityRLInitializationOutput> _parser = new pb::MessageParser<UnityRLInitializationOutput>(() => new UnityRLInitializationOutput());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityRLInitializationOutput> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityRlInitializationOutputReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInitializationOutput() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInitializationOutput(UnityRLInitializationOutput other) : this() {
name_ = other.name_;
version_ = other.version_;
logPath_ = other.logPath_;
brainParameters_ = other.brainParameters_.Clone();
environmentParameters_ = other.environmentParameters_ != null ? other.environmentParameters_.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInitializationOutput Clone() {
return new UnityRLInitializationOutput(this);
}
/// <summary>Field number for the "name" field.</summary>
public const int NameFieldNumber = 1;
private string name_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Name {
get { return name_; }
set {
name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "version" field.</summary>
public const int VersionFieldNumber = 2;
private string version_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Version {
get { return version_; }
set {
version_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "log_path" field.</summary>
public const int LogPathFieldNumber = 3;
private string logPath_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string LogPath {
get { return logPath_; }
set {
logPath_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "brain_parameters" field.</summary>
public const int BrainParametersFieldNumber = 5;
private static readonly pb::FieldCodec<global::MLAgents.CommunicatorObjects.BrainParametersProto> _repeated_brainParameters_codec
= pb::FieldCodec.ForMessage(42, global::MLAgents.CommunicatorObjects.BrainParametersProto.Parser);
private readonly pbc::RepeatedField<global::MLAgents.CommunicatorObjects.BrainParametersProto> brainParameters_ = new pbc::RepeatedField<global::MLAgents.CommunicatorObjects.BrainParametersProto>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::MLAgents.CommunicatorObjects.BrainParametersProto> BrainParameters {
get { return brainParameters_; }
}
/// <summary>Field number for the "environment_parameters" field.</summary>
public const int EnvironmentParametersFieldNumber = 6;
private global::MLAgents.CommunicatorObjects.EnvironmentParametersProto environmentParameters_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.EnvironmentParametersProto EnvironmentParameters {
get { return environmentParameters_; }
set {
environmentParameters_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityRLInitializationOutput);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityRLInitializationOutput other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Name != other.Name) return false;
if (Version != other.Version) return false;
if (LogPath != other.LogPath) return false;
if(!brainParameters_.Equals(other.brainParameters_)) return false;
if (!object.Equals(EnvironmentParameters, other.EnvironmentParameters)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Name.Length != 0) hash ^= Name.GetHashCode();
if (Version.Length != 0) hash ^= Version.GetHashCode();
if (LogPath.Length != 0) hash ^= LogPath.GetHashCode();
hash ^= brainParameters_.GetHashCode();
if (environmentParameters_ != null) hash ^= EnvironmentParameters.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Name.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Name);
}
if (Version.Length != 0) {
output.WriteRawTag(18);
output.WriteString(Version);
}
if (LogPath.Length != 0) {
output.WriteRawTag(26);
output.WriteString(LogPath);
}
brainParameters_.WriteTo(output, _repeated_brainParameters_codec);
if (environmentParameters_ != null) {
output.WriteRawTag(50);
output.WriteMessage(EnvironmentParameters);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Name.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
}
if (Version.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Version);
}
if (LogPath.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(LogPath);
}
size += brainParameters_.CalculateSize(_repeated_brainParameters_codec);
if (environmentParameters_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(EnvironmentParameters);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityRLInitializationOutput other) {
if (other == null) {
return;
}
if (other.Name.Length != 0) {
Name = other.Name;
}
if (other.Version.Length != 0) {
Version = other.Version;
}
if (other.LogPath.Length != 0) {
LogPath = other.LogPath;
}
brainParameters_.Add(other.brainParameters_);
if (other.environmentParameters_ != null) {
if (environmentParameters_ == null) {
EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto();
}
EnvironmentParameters.MergeFrom(other.EnvironmentParameters);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
Name = input.ReadString();
break;
}
case 18: {
Version = input.ReadString();
break;
}
case 26: {
LogPath = input.ReadString();
break;
}
case 42: {
brainParameters_.AddEntriesFrom(input, _repeated_brainParameters_codec);
break;
}
case 50: {
if (environmentParameters_ == null) {
EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto();
}
input.ReadMessage(EnvironmentParameters);
break;
}
}
}
}
}
#endregion
}
#endregion Designer generated code
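// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file): populating the repeated
// brain_parameters field and the environment_parameters submessage above. Nested
// message contents are left at defaults; the namespace, class name, and the literal
// values "3DBall" and "0.8.0" are hypothetical placeholders.
namespace MLAgents.Examples {
  public static class InitializationOutputExample {
    public static global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput Build() {
      var output = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput {
        Name = "3DBall",
        Version = "0.8.0",
        EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto()
      };
      // BrainParameters is a read-only RepeatedField; entries are appended with Add.
      output.BrainParameters.Add(new global::MLAgents.CommunicatorObjects.BrainParametersProto());
      return output;
    }
  }
}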
| 296 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_rl_input.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_rl_input.proto</summary>
public static partial class UnityRlInputReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_rl_input.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityRlInputReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjdtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js",
"X2lucHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cxo7bWxhZ2VudHMv",
"ZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9hZ2VudF9hY3Rpb25fcHJvdG8u",
"cHJvdG8aRW1sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvZW52",
"aXJvbm1lbnRfcGFyYW1ldGVyc19wcm90by5wcm90bxo2bWxhZ2VudHMvZW52",
"cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9jb21tYW5kX3Byb3RvLnByb3RvIrQD",
"CgxVbml0eVJMSW5wdXQSSwoNYWdlbnRfYWN0aW9ucxgBIAMoCzI0LmNvbW11",
"bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJbnB1dC5BZ2VudEFjdGlvbnNFbnRy",
"eRJQChZlbnZpcm9ubWVudF9wYXJhbWV0ZXJzGAIgASgLMjAuY29tbXVuaWNh",
"dG9yX29iamVjdHMuRW52aXJvbm1lbnRQYXJhbWV0ZXJzUHJvdG8SEwoLaXNf",
"dHJhaW5pbmcYAyABKAgSMwoHY29tbWFuZBgEIAEoDjIiLmNvbW11bmljYXRv",
"cl9vYmplY3RzLkNvbW1hbmRQcm90bxpNChRMaXN0QWdlbnRBY3Rpb25Qcm90",
"bxI1CgV2YWx1ZRgBIAMoCzImLmNvbW11bmljYXRvcl9vYmplY3RzLkFnZW50",
"QWN0aW9uUHJvdG8abAoRQWdlbnRBY3Rpb25zRW50cnkSCwoDa2V5GAEgASgJ",
"EkYKBXZhbHVlGAIgASgLMjcuY29tbXVuaWNhdG9yX29iamVjdHMuVW5pdHlS",
"TElucHV0Lkxpc3RBZ2VudEFjdGlvblByb3RvOgI4AUIfqgIcTUxBZ2VudHMu",
"Q29tbXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.AgentActionProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.EnvironmentParametersProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.CommandProtoReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInput), global::MLAgents.CommunicatorObjects.UnityRLInput.Parser, new[]{ "AgentActions", "EnvironmentParameters", "IsTraining", "Command" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto), global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto.Parser, new[]{ "Value" }, null, null, null),
null, })
}));
}
#endregion
}
#region Messages
public sealed partial class UnityRLInput : pb::IMessage<UnityRLInput> {
private static readonly pb::MessageParser<UnityRLInput> _parser = new pb::MessageParser<UnityRLInput>(() => new UnityRLInput());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityRLInput> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityRlInputReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInput() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInput(UnityRLInput other) : this() {
agentActions_ = other.agentActions_.Clone();
environmentParameters_ = other.environmentParameters_ != null ? other.environmentParameters_.Clone() : null;
isTraining_ = other.isTraining_;
command_ = other.command_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLInput Clone() {
return new UnityRLInput(this);
}
/// <summary>Field number for the "agent_actions" field.</summary>
public const int AgentActionsFieldNumber = 1;
private static readonly pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto>.Codec _map_agentActions_codec
= new pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto.Parser), 10);
private readonly pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto> agentActions_ = new pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto> AgentActions {
get { return agentActions_; }
}
/// <summary>Field number for the "environment_parameters" field.</summary>
public const int EnvironmentParametersFieldNumber = 2;
private global::MLAgents.CommunicatorObjects.EnvironmentParametersProto environmentParameters_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.EnvironmentParametersProto EnvironmentParameters {
get { return environmentParameters_; }
set {
environmentParameters_ = value;
}
}
/// <summary>Field number for the "is_training" field.</summary>
public const int IsTrainingFieldNumber = 3;
private bool isTraining_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool IsTraining {
get { return isTraining_; }
set {
isTraining_ = value;
}
}
/// <summary>Field number for the "command" field.</summary>
public const int CommandFieldNumber = 4;
private global::MLAgents.CommunicatorObjects.CommandProto command_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.CommandProto Command {
get { return command_; }
set {
command_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityRLInput);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityRLInput other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!AgentActions.Equals(other.AgentActions)) return false;
if (!object.Equals(EnvironmentParameters, other.EnvironmentParameters)) return false;
if (IsTraining != other.IsTraining) return false;
if (Command != other.Command) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= AgentActions.GetHashCode();
if (environmentParameters_ != null) hash ^= EnvironmentParameters.GetHashCode();
if (IsTraining != false) hash ^= IsTraining.GetHashCode();
if (Command != 0) hash ^= Command.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
agentActions_.WriteTo(output, _map_agentActions_codec);
if (environmentParameters_ != null) {
output.WriteRawTag(18);
output.WriteMessage(EnvironmentParameters);
}
if (IsTraining != false) {
output.WriteRawTag(24);
output.WriteBool(IsTraining);
}
if (Command != 0) {
output.WriteRawTag(32);
output.WriteEnum((int) Command);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += agentActions_.CalculateSize(_map_agentActions_codec);
if (environmentParameters_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(EnvironmentParameters);
}
if (IsTraining != false) {
size += 1 + 1;
}
if (Command != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Command);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityRLInput other) {
if (other == null) {
return;
}
agentActions_.Add(other.agentActions_);
if (other.environmentParameters_ != null) {
if (environmentParameters_ == null) {
EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto();
}
EnvironmentParameters.MergeFrom(other.EnvironmentParameters);
}
if (other.IsTraining != false) {
IsTraining = other.IsTraining;
}
if (other.Command != 0) {
Command = other.Command;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
agentActions_.AddEntriesFrom(input, _map_agentActions_codec);
break;
}
case 18: {
if (environmentParameters_ == null) {
EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto();
}
input.ReadMessage(EnvironmentParameters);
break;
}
case 24: {
IsTraining = input.ReadBool();
break;
}
case 32: {
Command = (global::MLAgents.CommunicatorObjects.CommandProto) input.ReadEnum();
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the UnityRLInput message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
public sealed partial class ListAgentActionProto : pb::IMessage<ListAgentActionProto> {
private static readonly pb::MessageParser<ListAgentActionProto> _parser = new pb::MessageParser<ListAgentActionProto>(() => new ListAgentActionProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<ListAgentActionProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityRLInput.Descriptor.NestedTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListAgentActionProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListAgentActionProto(ListAgentActionProto other) : this() {
value_ = other.value_.Clone();
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListAgentActionProto Clone() {
return new ListAgentActionProto(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private static readonly pb::FieldCodec<global::MLAgents.CommunicatorObjects.AgentActionProto> _repeated_value_codec
= pb::FieldCodec.ForMessage(10, global::MLAgents.CommunicatorObjects.AgentActionProto.Parser);
private readonly pbc::RepeatedField<global::MLAgents.CommunicatorObjects.AgentActionProto> value_ = new pbc::RepeatedField<global::MLAgents.CommunicatorObjects.AgentActionProto>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::MLAgents.CommunicatorObjects.AgentActionProto> Value {
get { return value_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as ListAgentActionProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(ListAgentActionProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if(!value_.Equals(other.value_)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= value_.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
value_.WriteTo(output, _repeated_value_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += value_.CalculateSize(_repeated_value_codec);
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(ListAgentActionProto other) {
if (other == null) {
return;
}
value_.Add(other.value_);
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
value_.AddEntriesFrom(input, _repeated_value_codec);
break;
}
}
}
}
}
}
#endregion
}
#endregion
}
#endregion Designer generated code
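// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the generated file): filling the agent_actions map
// of UnityRLInput. Each key is a brain name and each value is a ListAgentActionProto
// holding one AgentActionProto per agent. The brain name "Learner" and the empty
// action entries are hypothetical placeholders, as are the namespace/class names.
namespace MLAgents.Examples {
  public static class RLInputExample {
    public static global::MLAgents.CommunicatorObjects.UnityRLInput Build() {
      var input = new global::MLAgents.CommunicatorObjects.UnityRLInput {
        IsTraining = true
      };
      var actionList = new global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto();
      actionList.Value.Add(new global::MLAgents.CommunicatorObjects.AgentActionProto());
      // AgentActions is a read-only MapField; entries are added or replaced by key.
      input.AgentActions["Learner"] = actionList;
      return input;
    }
  }
}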
| 399 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_rl_output.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_rl_output.proto</summary>
public static partial class UnityRlOutputReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_rl_output.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityRlOutputReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjhtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js",
"X291dHB1dC5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaOW1sYWdlbnRz",
"L2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvYWdlbnRfaW5mb19wcm90by5w",
"cm90byKjAgoNVW5pdHlSTE91dHB1dBITCgtnbG9iYWxfZG9uZRgBIAEoCBJH",
"CgphZ2VudEluZm9zGAIgAygLMjMuY29tbXVuaWNhdG9yX29iamVjdHMuVW5p",
"dHlSTE91dHB1dC5BZ2VudEluZm9zRW50cnkaSQoSTGlzdEFnZW50SW5mb1By",
"b3RvEjMKBXZhbHVlGAEgAygLMiQuY29tbXVuaWNhdG9yX29iamVjdHMuQWdl",
"bnRJbmZvUHJvdG8aaQoPQWdlbnRJbmZvc0VudHJ5EgsKA2tleRgBIAEoCRJF",
"CgV2YWx1ZRgCIAEoCzI2LmNvbW11bmljYXRvcl9vYmplY3RzLlVuaXR5UkxP",
"dXRwdXQuTGlzdEFnZW50SW5mb1Byb3RvOgI4AUIfqgIcTUxBZ2VudHMuQ29t",
"bXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.AgentInfoProtoReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLOutput), global::MLAgents.CommunicatorObjects.UnityRLOutput.Parser, new[]{ "GlobalDone", "AgentInfos" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto), global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto.Parser, new[]{ "Value" }, null, null, null),
null, })
}));
}
#endregion
}
#region Messages
public sealed partial class UnityRLOutput : pb::IMessage<UnityRLOutput> {
private static readonly pb::MessageParser<UnityRLOutput> _parser = new pb::MessageParser<UnityRLOutput>(() => new UnityRLOutput());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<UnityRLOutput> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityRlOutputReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLOutput() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLOutput(UnityRLOutput other) : this() {
globalDone_ = other.globalDone_;
agentInfos_ = other.agentInfos_.Clone();
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public UnityRLOutput Clone() {
return new UnityRLOutput(this);
}
/// <summary>Field number for the "global_done" field.</summary>
public const int GlobalDoneFieldNumber = 1;
private bool globalDone_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool GlobalDone {
get { return globalDone_; }
set {
globalDone_ = value;
}
}
/// <summary>Field number for the "agentInfos" field.</summary>
public const int AgentInfosFieldNumber = 2;
private static readonly pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto>.Codec _map_agentInfos_codec
= new pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto.Parser), 18);
private readonly pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto> agentInfos_ = new pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<string, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto> AgentInfos {
get { return agentInfos_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as UnityRLOutput);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(UnityRLOutput other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (GlobalDone != other.GlobalDone) return false;
if (!AgentInfos.Equals(other.AgentInfos)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (GlobalDone != false) hash ^= GlobalDone.GetHashCode();
hash ^= AgentInfos.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (GlobalDone != false) {
output.WriteRawTag(8);
output.WriteBool(GlobalDone);
}
agentInfos_.WriteTo(output, _map_agentInfos_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (GlobalDone != false) {
size += 1 + 1;
}
size += agentInfos_.CalculateSize(_map_agentInfos_codec);
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(UnityRLOutput other) {
if (other == null) {
return;
}
if (other.GlobalDone != false) {
GlobalDone = other.GlobalDone;
}
agentInfos_.Add(other.agentInfos_);
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 8: {
GlobalDone = input.ReadBool();
break;
}
case 18: {
agentInfos_.AddEntriesFrom(input, _map_agentInfos_codec);
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the UnityRLOutput message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
public sealed partial class ListAgentInfoProto : pb::IMessage<ListAgentInfoProto> {
private static readonly pb::MessageParser<ListAgentInfoProto> _parser = new pb::MessageParser<ListAgentInfoProto>(() => new ListAgentInfoProto());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<ListAgentInfoProto> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::MLAgents.CommunicatorObjects.UnityRLOutput.Descriptor.NestedTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListAgentInfoProto() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListAgentInfoProto(ListAgentInfoProto other) : this() {
value_ = other.value_.Clone();
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListAgentInfoProto Clone() {
return new ListAgentInfoProto(this);
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 1;
private static readonly pb::FieldCodec<global::MLAgents.CommunicatorObjects.AgentInfoProto> _repeated_value_codec
= pb::FieldCodec.ForMessage(10, global::MLAgents.CommunicatorObjects.AgentInfoProto.Parser);
private readonly pbc::RepeatedField<global::MLAgents.CommunicatorObjects.AgentInfoProto> value_ = new pbc::RepeatedField<global::MLAgents.CommunicatorObjects.AgentInfoProto>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::MLAgents.CommunicatorObjects.AgentInfoProto> Value {
get { return value_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as ListAgentInfoProto);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(ListAgentInfoProto other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if(!value_.Equals(other.value_)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= value_.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
value_.WriteTo(output, _repeated_value_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += value_.CalculateSize(_repeated_value_codec);
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(ListAgentInfoProto other) {
if (other == null) {
return;
}
value_.Add(other.value_);
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
value_.AddEntriesFrom(input, _repeated_value_codec);
break;
}
}
}
}
}
}
#endregion
}
#endregion
}
#endregion Designer generated code
| 331 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_to_external.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace MLAgents.CommunicatorObjects {
/// <summary>Holder for reflection information generated from mlagents/envs/communicator_objects/unity_to_external.proto</summary>
public static partial class UnityToExternalReflection {
#region Descriptor
/// <summary>File descriptor for mlagents/envs/communicator_objects/unity_to_external.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static UnityToExternalReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjptbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Rv",
"X2V4dGVybmFsLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cxo2bWxhZ2Vu",
"dHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9tZXNzYWdlLnBy",
"b3RvMmcKD1VuaXR5VG9FeHRlcm5hbBJUCghFeGNoYW5nZRIiLmNvbW11bmlj",
"YXRvcl9vYmplY3RzLlVuaXR5TWVzc2FnZRoiLmNvbW11bmljYXRvcl9vYmpl",
"Y3RzLlVuaXR5TWVzc2FnZSIAQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JP",
"YmplY3RzYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityMessageReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, null));
}
#endregion
}
}
#endregion Designer generated code
| 44 |
ml-agents | openai | C# | // <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: mlagents/envs/communicator_objects/unity_to_external.proto
// </auto-generated>
#if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
#pragma warning disable 0414, 1591
#region Designer generated code
using grpc = global::Grpc.Core;
namespace MLAgents.CommunicatorObjects {
public static partial class UnityToExternal
{
static readonly string __ServiceName = "communicator_objects.UnityToExternal";
static readonly grpc::Marshaller<global::MLAgents.CommunicatorObjects.UnityMessage> __Marshaller_communicator_objects_UnityMessage = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::MLAgents.CommunicatorObjects.UnityMessage.Parser.ParseFrom);
static readonly grpc::Method<global::MLAgents.CommunicatorObjects.UnityMessage, global::MLAgents.CommunicatorObjects.UnityMessage> __Method_Exchange = new grpc::Method<global::MLAgents.CommunicatorObjects.UnityMessage, global::MLAgents.CommunicatorObjects.UnityMessage>(
grpc::MethodType.Unary,
__ServiceName,
"Exchange",
__Marshaller_communicator_objects_UnityMessage,
__Marshaller_communicator_objects_UnityMessage);
/// <summary>Service descriptor</summary>
public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
{
get { return global::MLAgents.CommunicatorObjects.UnityToExternalReflection.Descriptor.Services[0]; }
}
/// <summary>Base class for server-side implementations of UnityToExternal</summary>
public abstract partial class UnityToExternalBase
{
/// <summary>
/// Sends the academy parameters
/// </summary>
/// <param name="request">The request received from the client.</param>
/// <param name="context">The context of the server-side call handler being invoked.</param>
/// <returns>The response to send back to the client (wrapped by a task).</returns>
public virtual global::System.Threading.Tasks.Task<global::MLAgents.CommunicatorObjects.UnityMessage> Exchange(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
}
/// <summary>Client for UnityToExternal</summary>
public partial class UnityToExternalClient : grpc::ClientBase<UnityToExternalClient>
{
/// <summary>Creates a new client for UnityToExternal</summary>
/// <param name="channel">The channel to use to make remote calls.</param>
public UnityToExternalClient(grpc::Channel channel) : base(channel)
{
}
/// <summary>Creates a new client for UnityToExternal that uses a custom <c>CallInvoker</c>.</summary>
/// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
public UnityToExternalClient(grpc::CallInvoker callInvoker) : base(callInvoker)
{
}
/// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
protected UnityToExternalClient() : base()
{
}
/// <summary>Protected constructor to allow creation of configured clients.</summary>
/// <param name="configuration">The client configuration.</param>
protected UnityToExternalClient(ClientBaseConfiguration configuration) : base(configuration)
{
}
/// <summary>
/// Sends the academy parameters
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::MLAgents.CommunicatorObjects.UnityMessage Exchange(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return Exchange(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Sends the academy parameters
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The response received from the server.</returns>
public virtual global::MLAgents.CommunicatorObjects.UnityMessage Exchange(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_Exchange, null, options, request);
}
/// <summary>
/// Sends the academy parameters
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
/// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns>The call object.</returns>
public virtual grpc::AsyncUnaryCall<global::MLAgents.CommunicatorObjects.UnityMessage> ExchangeAsync(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return ExchangeAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
/// <summary>
/// Sends the academy parameters
/// </summary>
/// <param name="request">The request to send to the server.</param>
/// <param name="options">The options for the call.</param>
/// <returns>The call object.</returns>
public virtual grpc::AsyncUnaryCall<global::MLAgents.CommunicatorObjects.UnityMessage> ExchangeAsync(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_Exchange, null, options, request);
}
/// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary>
protected override UnityToExternalClient NewInstance(ClientBaseConfiguration configuration)
{
return new UnityToExternalClient(configuration);
}
}
/// <summary>Creates service definition that can be registered with a server</summary>
/// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
public static grpc::ServerServiceDefinition BindService(UnityToExternalBase serviceImpl)
{
return grpc::ServerServiceDefinition.CreateBuilder()
.AddMethod(__Method_Exchange, serviceImpl.Exchange).Build();
}
}
}
#endregion
#endif | 137 |
ml-agents | openai | C# | using System;
using System.Collections.Generic;
using System.Linq;
using Barracuda;
using MLAgents.InferenceBrain.Utils;
using UnityEngine;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// The Applier for the Continuous Action output tensor. Tensor is assumed to contain the
/// continuous action data of the agents in the batch.
/// </summary>
public class ContinuousActionOutputApplier : TensorApplier.IApplier
{
public void Apply(TensorProxy tensorProxy, Dictionary<Agent, AgentInfo> agentInfo)
{
var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1];
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var action = new float[actionSize];
for (var j = 0; j < actionSize; j++)
{
action[j] = tensorProxy.data[agentIndex, j];
}
agent.UpdateVectorAction(action);
agentIndex++;
}
}
}
/// <summary>
/// The Applier for the Discrete Action output tensor. Uses multinomial to sample discrete
/// actions from the logits contained in the tensor.
/// </summary>
public class DiscreteActionOutputApplier : TensorApplier.IApplier
{
private readonly int[] m_ActionSize;
private readonly Multinomial m_Multinomial;
private readonly ITensorAllocator m_Allocator;
public DiscreteActionOutputApplier(int[] actionSize, int seed, ITensorAllocator allocator)
{
m_ActionSize = actionSize;
m_Multinomial = new Multinomial(seed);
m_Allocator = allocator;
}
public void Apply(TensorProxy tensorProxy, Dictionary<Agent, AgentInfo> agentInfo)
{
//var tensorDataProbabilities = tensorProxy.Data as float[,];
var batchSize = agentInfo.Keys.Count;
var actions = new float[batchSize, m_ActionSize.Length];
var startActionIndices = Utilities.CumSum(m_ActionSize);
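// Utilities.CumSum is assumed here to return the running start offsets of each branch in
// the flattened logits, e.g. branch sizes {3, 2} -> {0, 3, 5}.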
for (var actionIndex = 0; actionIndex < m_ActionSize.Length; actionIndex++)
{
var nBranchAction = m_ActionSize[actionIndex];
var actionProbs = new TensorProxy()
{
valueType = TensorProxy.TensorType.FloatingPoint,
shape = new long[] {batchSize, nBranchAction},
data = m_Allocator.Alloc(new TensorShape(batchSize, nBranchAction))
};
for (var batchIndex = 0; batchIndex < batchSize; batchIndex++)
{
for (var branchActionIndex = 0;
branchActionIndex < nBranchAction;
branchActionIndex++)
{
actionProbs.data[batchIndex, branchActionIndex] =
tensorProxy.data[batchIndex, startActionIndices[actionIndex] + branchActionIndex];
}
}
var outputTensor = new TensorProxy()
{
valueType = TensorProxy.TensorType.FloatingPoint,
shape = new long[] {batchSize, 1},
data = m_Allocator.Alloc(new TensorShape(batchSize, 1))
};
Eval(actionProbs, outputTensor, m_Multinomial);
for (var ii = 0; ii < batchSize; ii++)
{
actions[ii, actionIndex] = outputTensor.data[ii, 0];
}
actionProbs.data.Dispose();
outputTensor.data.Dispose();
}
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var action = new float[m_ActionSize.Length];
for (var j = 0; j < m_ActionSize.Length; j++)
{
action[j] = actions[agentIndex, j];
}
agent.UpdateVectorAction(action);
agentIndex++;
}
}
/// <summary>
/// Draw samples from a multinomial distribution based on log-probabilities specified
/// in tensor src. The samples will be saved in the dst tensor.
/// </summary>
/// <param name="src">2-D tensor with shape batch_size x num_classes</param>
/// <param name="dst">Allocated tensor with size batch_size x num_samples</param>
/// <param name="multinomial">Multinomial object used to sample values</param>
/// <exception cref="NotImplementedException">
/// Multinomial doesn't support integer tensors
/// </exception>
/// <exception cref="ArgumentException">Issue with tensor shape or type</exception>
/// <exception cref="ArgumentNullException">
/// At least one of the tensors is not allocated
/// </exception>
public static void Eval(TensorProxy src, TensorProxy dst, Multinomial multinomial)
{
if (src.DataType != typeof(float))
{
throw new NotImplementedException("Only float tensors are currently supported");
}
if (src.valueType != dst.valueType)
{
throw new ArgumentException(
"Source and destination tensors have different types!");
}
if (src.data == null || dst.data == null)
{
throw new ArgumentNullException();
}
if (src.data.batch != dst.data.batch)
{
throw new ArgumentException("Batch size for input and output data is different!");
}
var cdf = new float[src.data.channels];
for (var batch = 0; batch < src.data.batch; ++batch)
{
// Find the class maximum
var maxProb = float.NegativeInfinity;
for (var cls = 0; cls < src.data.channels; ++cls)
{
maxProb = Mathf.Max(src.data[batch, cls], maxProb);
}
// Exponentiate the max-shifted logits and accumulate them into an unnormalized CDF
var sumProb = 0.0f;
for (var cls = 0; cls < src.data.channels; ++cls)
{
sumProb += Mathf.Exp(src.data[batch, cls] - maxProb);
cdf[cls] = sumProb;
}
// Generate the samples
for (var sample = 0; sample < dst.data.channels; ++sample)
{
dst.data[batch, sample] = multinomial.Sample(cdf);
}
}
}
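// Illustrative walk-through (not part of the original source): for a batch row with
// logits [1.0, 1.0, 2.0], the loop above builds the unnormalized CDF
// [e^-1, 2e^-1, 2e^-1 + 1] ~ [0.37, 0.74, 1.74]; Multinomial.Sample then draws p
// uniformly in [0, 1.74) and returns the first index whose CDF entry is >= p,
// so class 2 is selected with probability ~1/1.74 ~ 0.57.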
}
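/// <summary>
/// The Applier for the Memory output tensors of Barracuda models that expose several
/// memory tensors. Each instance writes one slice of size memorySize (at m_MemoryIndex)
/// of the output back into the agent's memory list.
/// </summary>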
public class BarracudaMemoryOutputApplier : TensorApplier.IApplier
{
private readonly int m_MemoriesCount;
private readonly int m_MemoryIndex;
public BarracudaMemoryOutputApplier(int memoriesCount, int memoryIndex)
{
m_MemoriesCount = memoriesCount;
m_MemoryIndex = memoryIndex;
}
public void Apply(TensorProxy tensorProxy, Dictionary<Agent, AgentInfo> agentInfo)
{
var agentIndex = 0;
var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
foreach (var agent in agentInfo.Keys)
{
var memory = agent.GetMemoriesAction();
if (memory == null || memory.Count < memorySize * m_MemoriesCount)
{
memory = new List<float>();
memory.AddRange(Enumerable.Repeat(0f, memorySize * m_MemoriesCount));
}
for (var j = 0; j < memorySize; j++)
{
memory[memorySize * m_MemoryIndex + j] = tensorProxy.data[agentIndex, j];
}
agent.UpdateMemoriesAction(memory);
agentIndex++;
}
}
}
/// <summary>
/// The Applier for the Memory output tensor. Tensor is assumed to contain the new
/// memory data of the agents in the batch.
/// </summary>
public class MemoryOutputApplier : TensorApplier.IApplier
{
public void Apply(TensorProxy tensorProxy, Dictionary<Agent, AgentInfo> agentInfo)
{
var agentIndex = 0;
var memorySize = tensorProxy.shape[tensorProxy.shape.Length - 1];
foreach (var agent in agentInfo.Keys)
{
var memory = new List<float>();
for (var j = 0; j < memorySize; j++)
{
memory.Add(tensorProxy.data[agentIndex, j]);
}
agent.UpdateMemoriesAction(memory);
agentIndex++;
}
}
}
/// <summary>
/// The Applier for the Value Estimate output tensor. Tensor is assumed to contain the
/// value estimates of the agents in the batch.
/// </summary>
public class ValueEstimateApplier : TensorApplier.IApplier
{
public void Apply(TensorProxy tensorProxy, Dictionary<Agent, AgentInfo> agentInfo)
{
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
agent.UpdateValueAction(tensorProxy.data[agentIndex, 0]);
agentIndex++;
}
}
}
}
| 250 |
ml-agents | openai | C# | using System;
using System.Collections.Generic;
using System.Linq;
using Barracuda;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// Prepares the Tensors for the Learning Brain and exposes a list of failed checks if Model
/// and BrainParameters are incompatible.
/// </summary>
public class BarracudaModelParamLoader
{
private enum ModelActionType
{
Unknown,
Discrete,
Continuous
}
private const long k_ApiVersion = 2;
private readonly IWorker m_Engine;
private readonly Model m_Model;
private readonly BrainParameters m_BrainParameters;
private readonly List<string> m_FailedModelChecks = new List<string>();
/// <summary>
/// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks
/// on it.
/// </summary>
/// <param name="engine">
/// The Barracuda engine worker we get the parameters and the checks from
/// </param>
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used to verify the compatibility with the InferenceEngine
/// </param>
/// <returns></returns>
public static BarracudaModelParamLoader GetLoaderAndCheck(
IWorker engine, Model model, BrainParameters brainParameters)
{
var modelParamLoader = new BarracudaModelParamLoader(engine, model, brainParameters);
modelParamLoader.GenerateChecks();
return modelParamLoader;
}
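// Typical use (illustrative sketch, not from the original source): a brain that owns a
// Barracuda worker would create the loader once and surface any failed checks, e.g.
//   var loader = BarracudaModelParamLoader.GetLoaderAndCheck(engine, model, brainParameters);
//   foreach (var check in loader.GetChecks()) { UnityEngine.Debug.LogWarning(check); }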
private BarracudaModelParamLoader(
IWorker engine, Model model, BrainParameters brainParameters)
{
m_Engine = engine;
m_Model = model;
m_BrainParameters = brainParameters;
}
/// <summary>
/// Generates the Tensor inputs that are expected to be present in the Model.
/// </summary>
/// <returns>TensorProxy IEnumerable with the expected Tensor inputs</returns>
public IReadOnlyList<TensorProxy> GetInputTensors()
{
var tensors = new List<TensorProxy>();
if (m_Model == null)
return tensors;
foreach (var input in m_Model.inputs)
{
tensors.Add(new TensorProxy
{
name = input.name,
valueType = TensorProxy.TensorType.FloatingPoint,
data = null,
shape = input.shape.Select(i => (long)i).ToArray()
});
}
foreach (var mem in m_Model.memories)
{
//Debug.Log($"{mem.input}: {mem.shape} -> {BarracudaUtils.TensorShapeFromBarracuda(mem.shape).Length}");
tensors.Add(new TensorProxy
{
name = mem.input,
valueType = TensorProxy.TensorType.FloatingPoint,
data = null,
shape = TensorUtils.TensorShapeFromBarracuda(mem.shape)
});
}
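// Sort by name so the expected inputs are returned in a deterministic order.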
tensors.Sort((el1, el2) => el1.name.CompareTo(el2.name));
return tensors;
}
/// <summary>
/// Generates the names of the Tensor outputs that are expected to be present in the Model.
/// </summary>
/// <returns>Array of the expected output tensor names</returns>
public string[] GetOutputNames()
{
var names = new List<string>();
if (m_Model == null)
{
return names.ToArray();
}
names.Add(TensorNames.ActionOutput);
var memory = GetIntScalar(TensorNames.MemorySize);
if (memory > 0)
{
foreach (var mem in m_Model.memories)
{
names.Add(mem.output);
}
}
names.Sort();
return names.ToArray();
}
/// <summary>
/// Queries the InferenceEngine for the value of a variable in the graph given its name.
/// Only works with int32 Tensors with zero dimensions containing a single element.
/// If the node was not found or could not be retrieved, the value -1 will be returned.
/// </summary>
/// <param name="name">The name of the Tensor variable</param>
/// <returns>The value of the scalar variable in the model. (-1 if not found)</returns>
private int GetIntScalar(string name)
{
return (int)m_Model.GetTensorByName(name)[0];
}
/// <summary>
/// Retrieves an IEnumerable of string corresponding to the failed compatibility checks
/// between the InferenceEngine and the BrainParameters.
/// </summary>
public IEnumerable<string> GetChecks()
{
return m_FailedModelChecks;
}
/// <summary>
/// Generates the list of failed checks that failed when comparing the data from the Model
/// and from the BrainParameters
/// </summary>
private void GenerateChecks()
{
m_FailedModelChecks.Clear();
if (m_Engine == null)
{
m_FailedModelChecks.Add(
"There is no model for this Brain, cannot run inference. " +
"(But can still train)");
return;
}
var modelApiVersion = GetIntScalar(TensorNames.VersionNumber);
var memorySize = GetIntScalar(TensorNames.MemorySize);
var isContinuousInt = GetIntScalar(TensorNames.IsContinuousControl);
var isContinuous = GetActionType(isContinuousInt);
var actionSize = GetIntScalar(TensorNames.ActionOutputShape);
if (modelApiVersion == -1)
{
m_FailedModelChecks.Add(
"Model was not trained using the right version of ML-Agents. " +
"Cannot use this model.");
return;
}
if (modelApiVersion != k_ApiVersion)
{
m_FailedModelChecks.Add(
$"Version of the trainer the model was trained with ({modelApiVersion}) " +
$"is not compatible with the Brain's version ({k_ApiVersion}).");
return;
}
CheckIntScalarPresenceHelper(new Dictionary<string, int>()
{
{TensorNames.MemorySize, memorySize},
{TensorNames.IsContinuousControl, isContinuousInt},
{TensorNames.ActionOutputShape, actionSize}
});
CheckInputTensorPresence(memorySize, isContinuous);
CheckOutputTensorPresence(memorySize);
CheckInputTensorShape();
CheckOutputTensorShape(isContinuous, actionSize);
}
/// <summary>
/// Converts the integer value in the model corresponding to the type of control to a
/// ModelActionType.
/// </summary>
/// <param name="isContinuousInt">
/// The integer value in the model indicating the type of control
/// </param>
/// <returns>The equivalent ModelActionType</returns>
private static ModelActionType GetActionType(int isContinuousInt)
{
ModelActionType isContinuous;
switch (isContinuousInt)
{
case 0:
isContinuous = ModelActionType.Discrete;
break;
case 1:
isContinuous = ModelActionType.Continuous;
break;
default:
isContinuous = ModelActionType.Unknown;
break;
}
return isContinuous;
}
/// <summary>
/// Given a Dictionary of node names to int values, adds a failed check for every value
/// equal to the invalid value of -1.
/// </summary>
/// <param name="requiredScalarFields"> Mapping from node names to int values</param>
private void CheckIntScalarPresenceHelper(Dictionary<string, int> requiredScalarFields)
{
foreach (var field in requiredScalarFields)
{
if (field.Value == -1)
{
m_FailedModelChecks.Add($"Missing node in the model provided : {field.Key}");
}
}
}
/// <summary>
/// Generates failed checks that correspond to inputs expected by the model that are not
/// present in the BrainParameters.
/// </summary>
/// <param name="memory">
/// The memory size that the model is expecting.
/// </param>
/// <param name="isContinuous">
/// Whether the model is expecting continuous or discrete control.
/// </param>
/// <returns>
/// An IEnumerable of string corresponding to the failed input presence checks.
/// </returns>
private void CheckInputTensorPresence(int memory, ModelActionType isContinuous)
{
var tensorsNames = GetInputTensors().Select(x => x.name).ToList();
// If there is no Vector Observation Input but the Brain Parameters expect one.
if ((m_BrainParameters.vectorObservationSize != 0) &&
(!tensorsNames.Contains(TensorNames.VectorObservationPlacholder)))
{
m_FailedModelChecks.Add(
"The model does not contain a Vector Observation Placeholder Input. " +
"You must set the Vector Observation Space Size to 0.");
}
// If there are not enough Visual Observation Input compared to what the
// Brain Parameters expect.
for (var visObsIndex = 0;
visObsIndex < m_BrainParameters.cameraResolutions.Length;
visObsIndex++)
{
if (!tensorsNames.Contains(
TensorNames.VisualObservationPlaceholderPrefix + visObsIndex))
{
m_FailedModelChecks.Add(
"The model does not contain a Visual Observation Placeholder Input " +
"for visual observation " + visObsIndex + ".");
}
}
// If the model has a positive memory size but is missing the expected recurrent inputs
if (memory > 0)
{
if (!tensorsNames.Any(x => x.EndsWith("_h")) ||
!tensorsNames.Any(x => x.EndsWith("_c")))
{
m_FailedModelChecks.Add(
"The model does not contain a Recurrent Input Node but has memory_size.");
}
}
// If the model uses discrete control but does not have an input for action masks
if (isContinuous == ModelActionType.Discrete)
{
if (!tensorsNames.Contains(TensorNames.ActionMaskPlaceholder))
{
m_FailedModelChecks.Add(
"The model does not contain an Action Mask but is using Discrete Control.");
}
}
}
/// <summary>
/// Generates failed checks that correspond to outputs expected by the model that are not
/// present in the BrainParameters.
/// </summary>
/// <param name="memory">The memory size that the model is expecting.</param>
/// <returns>
/// An IEnumerable of string corresponding to the failed output presence checks.
/// </returns>
private void CheckOutputTensorPresence(int memory)
{
// If there is no Action Output.
if (!m_Model.outputs.Contains(TensorNames.ActionOutput))
{
m_FailedModelChecks.Add("The model does not contain an Action Output Node.");
}
// If there is no Recurrent Output but the model is Recurrent.
if (memory > 0)
{
var memOutputs = m_Model.memories.Select(x => x.output).ToList();
if (!memOutputs.Any(x => x.EndsWith("_h")) ||
!memOutputs.Any(x => x.EndsWith("_c")))
{
m_FailedModelChecks.Add(
"The model does not contain a Recurrent Output Node but has memory_size.");
}
}
}
/// <summary>
/// Generates failed checks that correspond to inputs shapes incompatibilities between
/// the model and the BrainParameters.
/// </summary>
private void CheckInputTensorShape()
{
var tensorTester =
new Dictionary<string, Func<TensorProxy, string>>()
{
{TensorNames.VectorObservationPlacholder, CheckVectorObsShape},
{TensorNames.PreviousActionPlaceholder, CheckPreviousActionShape},
{TensorNames.RandomNormalEpsilonPlaceholder, ((tensor) => null)},
{TensorNames.ActionMaskPlaceholder, ((tensor) => null)},
{TensorNames.SequenceLengthPlaceholder, ((tensor) => null)},
{TensorNames.RecurrentInPlaceholder, ((tensor) => null)},
};
foreach (var mem in m_Model.memories)
{
tensorTester[mem.input] = ((tensor) => null);
}
for (var obsIndex = 0; obsIndex < m_BrainParameters.cameraResolutions.Length; obsIndex++)
{
var index = obsIndex;
tensorTester[TensorNames.VisualObservationPlaceholderPrefix + obsIndex] =
(tensor) => CheckVisualObsShape(tensor, index);
}
// If the model expects an input but it is not in this list
foreach (var tensor in GetInputTensors())
{
if (!tensorTester.ContainsKey(tensor.name))
{
m_FailedModelChecks.Add(
"Model requires an unknown input named : " + tensor.name);
}
else
{
var tester = tensorTester[tensor.name];
var error = tester.Invoke(tensor);
if (error != null)
{
m_FailedModelChecks.Add(error);
}
}
}
}
/// <summary>
/// Checks that the shape of the Vector Observation input placeholder is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="tensorProxy">The tensor that is expected by the model</param>
/// <returns>
/// If the Check failed, returns a string containing information about why the
/// check failed. If the check passed, returns null.
/// </returns>
private string CheckVectorObsShape(TensorProxy tensorProxy)
{
var vecObsSizeBp = m_BrainParameters.vectorObservationSize;
var numStackedVector = m_BrainParameters.numStackedVectorObservations;
var totalVecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
if (vecObsSizeBp * numStackedVector != totalVecObsSizeT)
{
return "Vector Observation Size of the model does not match. Received " +
$"{vecObsSizeBp} x {numStackedVector} but was expecting {totalVecObsSizeT}.";
}
return null;
}
/// <summary>
/// Checks that the shape of the Previous Vector Action input placeholder is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="tensorProxy"> The tensor that is expected by the model</param>
/// <returns>If the Check failed, returns a string containing information about why the
/// check failed. If the check passed, returns null.</returns>
private string CheckPreviousActionShape(TensorProxy tensorProxy)
{
var numberActionsBp = m_BrainParameters.vectorActionSize.Length;
var numberActionsT = tensorProxy.shape[tensorProxy.shape.Length - 1];
if (numberActionsBp != numberActionsT)
{
return "Previous Action Size of the model does not match. " +
$"Received {numberActionsBp} but was expecting {numberActionsT}.";
}
return null;
}
/// <summary>
/// Checks that the shape of the visual observation input placeholder is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="tensorProxy">The tensor that is expected by the model</param>
/// <param name="visObsIndex">The index of the visual observation.</param>
/// <returns>
/// If the Check failed, returns a string containing information about why the
/// check failed. If the check passed, returns null.
/// </returns>
private string CheckVisualObsShape(TensorProxy tensorProxy, int visObsIndex)
{
var resolutionBp = m_BrainParameters.cameraResolutions[visObsIndex];
var widthBp = resolutionBp.width;
var heightBp = resolutionBp.height;
var pixelBp = resolutionBp.blackAndWhite ? 1 : 3;
var heightT = tensorProxy.shape[1];
var widthT = tensorProxy.shape[2];
var pixelT = tensorProxy.shape[3];
if ((widthBp != widthT) || (heightBp != heightT) || (pixelBp != pixelT))
{
return $"The visual Observation {visObsIndex} of the model does not match. " +
$"Received TensorProxy of shape [?x{widthBp}x{heightBp}x{pixelBp}] but " +
$"was expecting [?x{widthT}x{heightT}x{pixelT}].";
}
return null;
}
/// <summary>
/// Generates failed checks that correspond to output shapes incompatibilities between
/// the model and the BrainParameters.
/// </summary>
/// <param name="isContinuous">
/// Whether the model is expecting continuous or discrete control.
/// </param>
/// <param name="modelActionSize">
/// The size of the action output that is expected by the model.
/// </param>
/// <returns>
/// An IEnumerable of string corresponding to the incompatible shapes between model
/// and BrainParameters.
/// </returns>
private void CheckOutputTensorShape(ModelActionType isContinuous, int modelActionSize)
{
if (isContinuous == ModelActionType.Unknown)
{
m_FailedModelChecks.Add("Cannot infer type of Control from the provided model.");
return;
}
if (isContinuous == ModelActionType.Continuous &&
m_BrainParameters.vectorActionSpaceType != SpaceType.Continuous)
{
m_FailedModelChecks.Add(
"Model has been trained using Continuous Control but the Brain Parameters " +
"suggest Discrete Control.");
return;
}
if (isContinuous == ModelActionType.Discrete &&
m_BrainParameters.vectorActionSpaceType != SpaceType.Discrete)
{
m_FailedModelChecks.Add(
"Model has been trained using Discrete Control but the Brain Parameters " +
"suggest Continuous Control.");
return;
}
var tensorTester = new Dictionary<string, Func<TensorShape, int, string>>();
if (m_BrainParameters.vectorActionSpaceType == SpaceType.Continuous)
{
tensorTester[TensorNames.ActionOutput] = CheckContinuousActionOutputShape;
}
else
{
tensorTester[TensorNames.ActionOutput] = CheckDiscreteActionOutputShape;
}
// If the model expects an output but it is not in this list
foreach (var name in m_Model.outputs)
{
if (tensorTester.ContainsKey(name))
{
var tester = tensorTester[name];
var error = tester.Invoke(m_Model.GetShapeByName(name), modelActionSize);
if (error != null)
{
m_FailedModelChecks.Add(error);
}
}
}
}
/// <summary>
/// Checks that the shape of the discrete action output is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="shape"> The tensor shape that is expected by the model</param>
/// <param name="modelActionSize">
/// The size of the action output that is expected by the model.
/// </param>
/// <returns>
/// If the Check failed, returns a string containing information about why the
/// check failed. If the check passed, returns null.
/// </returns>
private string CheckDiscreteActionOutputShape(TensorShape shape, int modelActionSize)
{
var bpActionSize = m_BrainParameters.vectorActionSize.Sum();
if (modelActionSize != bpActionSize)
{
return "Action Size of the model does not match. The BrainParameters expect " +
$"{bpActionSize} but the model contains {modelActionSize}.";
}
return null;
}
/// <summary>
/// Checks that the shape of the continuous action output is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="shape"> The tensor shape that is expected by the model</param>
/// <param name="modelActionSize">
/// The size of the action output that is expected by the model.
/// </param>
/// <returns>If the Check failed, returns a string containing information about why the
/// check failed. If the check passed, returns null.</returns>
private string CheckContinuousActionOutputShape(TensorShape shape, int modelActionSize)
{
var bpActionSize = m_BrainParameters.vectorActionSize[0];
if (modelActionSize != bpActionSize)
{
return "Action Size of the model does not match. The BrainParameters expect " +
$"{bpActionSize} but the model contains {modelActionSize}.";
}
return null;
}
}
}
| 549 |
ml-agents | openai | C# | using System.Collections.Generic;
using System;
using System.Linq;
using Barracuda;
using MLAgents.InferenceBrain.Utils;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// Reshapes a Tensor so that its first dimension becomes equal to the current batch size
/// and initializes its content to be zeros. Will only work on 2-dimensional tensors.
/// The second dimension of the Tensor will not be modified.
/// </summary>
public class BiDimensionalOutputGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public BiDimensionalOutputGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
}
}
/// <summary>
/// Generates the Tensor corresponding to the BatchSize input : Will be a one dimensional
/// integer array of size 1 containing the batch size.
/// </summary>
public class BatchSizeGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public BatchSizeGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
tensorProxy.data?.Dispose();
tensorProxy.data = m_Allocator.Alloc(new TensorShape(1, 1));
tensorProxy.data[0] = batchSize;
}
}
/// <summary>
/// Generates the Tensor corresponding to the SequenceLength input : Will be a one
/// dimensional integer array of size 1 containing 1.
/// Note: the sequence length is always one since recurrent networks only predict for
/// one step at a time.
/// </summary>
public class SequenceLengthGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public SequenceLengthGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
tensorProxy.shape = new long[0];
tensorProxy.data?.Dispose();
tensorProxy.data = m_Allocator.Alloc(new TensorShape(1, 1));
tensorProxy.data[0] = 1;
}
}
/// <summary>
/// Generates the Tensor corresponding to the VectorObservation input : Will be a two
/// dimensional float array of dimension [batchSize x vectorObservationSize].
/// It will use the Vector Observation data contained in the agentInfo to fill the data
/// of the tensor.
/// </summary>
public class VectorObservationGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public VectorObservationGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var vectorObs = agentInfo[agent].stackedVectorObservation;
for (var j = 0; j < vecObsSizeT; j++)
{
tensorProxy.data[agentIndex, j] = vectorObs[j];
}
agentIndex++;
}
}
}
/// <summary>
/// Generates the Tensor corresponding to the Recurrent input : Will be a two
/// dimensional float array of dimension [batchSize x memorySize].
/// It will use the Memory data contained in the agentInfo to fill the data
/// of the tensor.
/// </summary>
public class RecurrentInputGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public RecurrentInputGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
var memorySize = tensorProxy.shape[tensorProxy.shape.Length - 1];
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var memory = agentInfo[agent].memories;
if (memory == null)
{
agentIndex++;
continue;
}
for (var j = 0; j < Math.Min(memorySize, memory.Count); j++)
{
tensorProxy.data[agentIndex, j] = memory[j];
}
agentIndex++;
}
}
}
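/// <summary>
/// Generates one slice of the Recurrent input for Barracuda models that expose several
/// memory tensors. Each instance fills [batchSize x memorySize] data for the memory slice
/// at m_MemoryIndex, reading the agent memories at the corresponding offset.
/// </summary>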
public class BarracudaRecurrentInputGenerator : TensorGenerator.IGenerator
{
private int m_MemoriesCount;
private readonly int m_MemoryIndex;
private readonly ITensorAllocator m_Allocator;
public BarracudaRecurrentInputGenerator(int memoryIndex, ITensorAllocator allocator)
{
m_MemoryIndex = memoryIndex;
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1];
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var memory = agentInfo[agent].memories;
var offset = memorySize * m_MemoryIndex;
if (memory == null)
{
agentIndex++;
continue;
}
for (var j = 0; j < memorySize; j++)
{
// Guard against reading past the stored memories for this memory slice.
if (j + offset >= memory.Count)
{
break;
}
tensorProxy.data[agentIndex, j] = memory[j + offset];
}
agentIndex++;
}
}
}
/// <summary>
/// Generates the Tensor corresponding to the Previous Action input : Will be a two
/// dimensional integer array of dimension [batchSize x actionSize].
/// It will use the previous action data contained in the agentInfo to fill the data
/// of the tensor.
/// </summary>
public class PreviousActionInputGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public PreviousActionInputGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1];
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var pastAction = agentInfo[agent].storedVectorActions;
for (var j = 0; j < actionSize; j++)
{
tensorProxy.data[agentIndex, j] = pastAction[j];
}
agentIndex++;
}
}
}
/// <summary>
/// Generates the Tensor corresponding to the Action Mask input : Will be a two
/// dimensional float array of dimension [batchSize x numActionLogits].
/// It will use the Action Mask data contained in the agentInfo to fill the data
/// of the tensor.
/// </summary>
public class ActionMaskInputGenerator : TensorGenerator.IGenerator
{
private readonly ITensorAllocator m_Allocator;
public ActionMaskInputGenerator(ITensorAllocator allocator)
{
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
var maskSize = tensorProxy.shape[tensorProxy.shape.Length - 1];
var agentIndex = 0;
foreach (var agent in agentInfo.Keys)
{
var maskList = agentInfo[agent].actionMasks;
for (var j = 0; j < maskSize; j++)
{
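// Convention implied by the code below: 1.0 marks an action as available and 0.0 marks
// it as masked out; actionMasks[j] == true means the action is disallowed.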
var isUnmasked = (maskList != null && maskList[j]) ? 0.0f : 1.0f;
tensorProxy.data[agentIndex, j] = isUnmasked;
}
agentIndex++;
}
}
}
/// <summary>
/// Generates the Tensor corresponding to the Epsilon input : Will be a two
/// dimensional float array of dimension [batchSize x actionSize].
/// It will generate random input data from a normal distribution.
/// </summary>
public class RandomNormalInputGenerator : TensorGenerator.IGenerator
{
private readonly RandomNormal m_RandomNormal;
private readonly ITensorAllocator m_Allocator;
public RandomNormalInputGenerator(int seed, ITensorAllocator allocator)
{
m_RandomNormal = new RandomNormal(seed);
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal);
}
}
/// <summary>
/// Generates the Tensor corresponding to the Visual Observation input : Will be a 4
/// dimensional float array of dimension [batchSize x width x height x numChannels].
/// It will use the Texture input data contained in the agentInfo to fill the data
/// of the tensor.
/// </summary>
public class VisualObservationInputGenerator : TensorGenerator.IGenerator
{
private readonly int m_Index;
private readonly bool m_GrayScale;
private readonly ITensorAllocator m_Allocator;
public VisualObservationInputGenerator(
int index, bool grayScale, ITensorAllocator allocator)
{
m_Index = index;
m_GrayScale = grayScale;
m_Allocator = allocator;
}
public void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo)
{
var textures = agentInfo.Keys.Select(
agent => agentInfo[agent].visualObservations[m_Index]).ToList();
TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator);
Utilities.TextureToTensorProxy(textures, tensorProxy, m_GrayScale);
}
}
}
| 318 |
ml-agents | openai | C# | using System.Collections.Generic;
using Barracuda;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// Mapping between the output tensor names and the method that will use the
/// output tensors and the Agents present in the batch to update their action, memories and
/// value estimates.
/// A TensorApplier implements a Dictionary of strings (node names) to an Action.
/// This action takes as input the tensor and the Dictionary of Agent to AgentInfo for
/// the current batch.
/// </summary>
public class TensorApplier
{
/// <summary>
/// A tensor Applier's Apply method takes a tensor and a Dictionary of Agent to AgentInfo.
/// Uses the data contained inside the tensor to modify the state of the Agent. The Tensors
/// are assumed to have the batch size on the first dimension and the agents to be ordered
/// the same way in the dictionary and in the tensor.
/// </summary>
public interface IApplier
{
/// <summary>
/// Applies the values in the Tensor to the Agents present in the agentInfos
/// </summary>
/// <param name="tensorProxy">
/// The Tensor containing the data to be applied to the Agents
/// </param>
/// <param name="agentInfo">
/// Dictionary of Agents to AgentInfo that will receive
/// the values of the Tensor.
/// </param>
void Apply(TensorProxy tensorProxy, Dictionary<Agent, AgentInfo> agentInfo);
}
private readonly Dictionary<string, IApplier> m_Dict = new Dictionary<string, IApplier>();
/// <summary>
/// Returns a new TensorAppliers object.
/// </summary>
/// <param name="bp"> The BrainParameters used to determine what Appliers will be
/// used</param>
/// <param name="seed"> The seed the Appliers will be initialized with.</param>
/// <param name="allocator"> Tensor allocator</param>
/// <param name="barracudaModel"></param>
public TensorApplier(
BrainParameters bp, int seed, ITensorAllocator allocator, object barracudaModel = null)
{
m_Dict[TensorNames.ValueEstimateOutput] = new ValueEstimateApplier();
if (bp.vectorActionSpaceType == SpaceType.Continuous)
{
m_Dict[TensorNames.ActionOutput] = new ContinuousActionOutputApplier();
}
else
{
m_Dict[TensorNames.ActionOutput] =
new DiscreteActionOutputApplier(bp.vectorActionSize, seed, allocator);
}
m_Dict[TensorNames.RecurrentOutput] = new MemoryOutputApplier();
if (barracudaModel != null)
{
var model = (Model)barracudaModel;
for (var i = 0; i < model?.memories.Length; i++)
{
m_Dict[model.memories[i].output] =
new BarracudaMemoryOutputApplier(model.memories.Length, i);
}
}
}
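// Typical flow (illustrative, not from the original source): the owning brain constructs
// one TensorApplier per model, runs inference, fetches the output TensorProxies from the
// Barracuda worker and then calls ApplyTensors(outputs, agentInfos) once per decision step.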
/// <summary>
/// Updates the state of the agents based on the data present in the tensor.
/// </summary>
/// <param name="tensors"> Enumerable of tensors containing the data.</param>
/// <param name="agentInfos"> Dictionary of Agent to AgentInfo that contains the
/// Agents that will be updated using the tensor's data</param>
/// <exception cref="UnityAgentsException"> One of the tensors does not have an
/// associated applier.</exception>
public void ApplyTensors(
IEnumerable<TensorProxy> tensors, Dictionary<Agent, AgentInfo> agentInfos)
{
foreach (var tensor in tensors)
{
if (!m_Dict.ContainsKey(tensor.name))
{
throw new UnityAgentsException(
$"Unknown tensorProxy expected as output : {tensor.name}");
}
m_Dict[tensor.name].Apply(tensor, agentInfos);
}
}
}
}
| 97 |
ml-agents | openai | C# | using System.Collections.Generic;
using Barracuda;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// Mapping between Tensor names and generators.
/// A TensorGenerator implements a Dictionary of strings (node names) to an Action.
/// The Action takes as arguments the tensor, the current batch size and a Dictionary of
/// Agent to AgentInfo corresponding to the current batch.
/// Each Generator reshapes and fills the data of the tensor based on the data of the batch.
/// When the TensorProxy is an Input to the model, the shape of the Tensor will be modified
/// depending on the current batch size and the data of the Tensor will be filled using the
/// Dictionary of Agent to AgentInfo.
/// When the TensorProxy is an Output of the model, only the shape of the Tensor will be
/// modified using the current batch size. The data will be pre-filled with zeros.
/// </summary>
public class TensorGenerator
{
public interface IGenerator
{
/// <summary>
/// Modifies the data inside a Tensor according to the information contained in the
/// AgentInfos contained in the current batch.
/// </summary>
/// <param name="tensorProxy"> The tensor whose data and shape will be modified</param>
/// <param name="batchSize"> The number of agents present in the current batch</param>
/// <param name="agentInfo"> Dictionary of Agent to AgentInfo containing the
/// information that will be used to populate the tensor's data</param>
void Generate(
TensorProxy tensorProxy, int batchSize, Dictionary<Agent, AgentInfo> agentInfo);
}
private readonly Dictionary<string, IGenerator> m_Dict = new Dictionary<string, IGenerator>();
/// <summary>
/// Returns a new TensorGenerators object.
/// </summary>
/// <param name="bp"> The BrainParameters used to determine what Generators will be
/// used</param>
/// <param name="seed"> The seed the Generators will be initialized with.</param>
/// <param name="allocator"> Tensor allocator</param>
/// <param name="barracudaModel"></param>
public TensorGenerator(
BrainParameters bp, int seed, ITensorAllocator allocator, object barracudaModel = null)
{
// Generator for Inputs
m_Dict[TensorNames.BatchSizePlaceholder] =
new BatchSizeGenerator(allocator);
m_Dict[TensorNames.SequenceLengthPlaceholder] =
new SequenceLengthGenerator(allocator);
m_Dict[TensorNames.VectorObservationPlacholder] =
new VectorObservationGenerator(allocator);
m_Dict[TensorNames.RecurrentInPlaceholder] =
new RecurrentInputGenerator(allocator);
if (barracudaModel != null)
{
var model = (Model)barracudaModel;
for (var i = 0; i < model?.memories.Length; i++)
{
m_Dict[model.memories[i].input] =
new BarracudaRecurrentInputGenerator(i, allocator);
}
}
m_Dict[TensorNames.PreviousActionPlaceholder] =
new PreviousActionInputGenerator(allocator);
m_Dict[TensorNames.ActionMaskPlaceholder] =
new ActionMaskInputGenerator(allocator);
m_Dict[TensorNames.RandomNormalEpsilonPlaceholder] =
new RandomNormalInputGenerator(seed, allocator);
if (bp.cameraResolutions != null)
{
for (var visIndex = 0;
visIndex < bp.cameraResolutions.Length;
visIndex++)
{
var index = visIndex;
var bw = bp.cameraResolutions[visIndex].blackAndWhite;
m_Dict[TensorNames.VisualObservationPlaceholderPrefix + visIndex] =
new VisualObservationInputGenerator(index, bw, allocator);
}
}
// Generators for Outputs
m_Dict[TensorNames.ActionOutput] = new BiDimensionalOutputGenerator(allocator);
m_Dict[TensorNames.RecurrentOutput] = new BiDimensionalOutputGenerator(allocator);
m_Dict[TensorNames.ValueEstimateOutput] = new BiDimensionalOutputGenerator(allocator);
}
/// <summary>
/// Populates the data of the tensor inputs given the data contained in the current batch
/// of agents.
/// </summary>
/// <param name="tensors"> Enumerable of tensors that will be modified.</param>
/// <param name="currentBatchSize"> The number of agents present in the current batch
/// </param>
/// <param name="agentInfos"> Dictionary of Agent to AgentInfo that contains the
/// data that will be used to modify the tensors</param>
/// <exception cref="UnityAgentsException"> One of the tensors does not have an
/// associated generator.</exception>
public void GenerateTensors(
IEnumerable<TensorProxy> tensors,
int currentBatchSize,
Dictionary<Agent, AgentInfo> agentInfos)
{
foreach (var tensor in tensors)
{
if (!m_Dict.ContainsKey(tensor.name))
{
throw new UnityAgentsException(
$"Unknown tensorProxy expected as input : {tensor.name}");
}
m_Dict[tensor.name].Generate(tensor, currentBatchSize, agentInfos);
}
}
}
}
| 120 |
ml-agents | openai | C# | namespace MLAgents.InferenceBrain
{
/// <summary>
/// Contains the names of the input and output tensors for the Inference Brain.
/// </summary>
public static class TensorNames
{
public const string BatchSizePlaceholder = "batch_size";
public const string SequenceLengthPlaceholder = "sequence_length";
public const string VectorObservationPlacholder = "vector_observation";
public const string RecurrentInPlaceholder = "recurrent_in";
public const string recurrentInPlaceholderH = "recurrent_in_h";
public const string recurrentInPlaceholderC = "recurrent_in_c";
public const string VisualObservationPlaceholderPrefix = "visual_observation_";
public const string PreviousActionPlaceholder = "prev_action";
public const string ActionMaskPlaceholder = "action_masks";
public const string RandomNormalEpsilonPlaceholder = "epsilon";
public const string ValueEstimateOutput = "value_estimate";
public const string RecurrentOutput = "recurrent_out";
public const string recurrentOutputH = "recurrent_out_h";
public const string recurrentOutputC = "recurrent_out_c";
public const string MemorySize = "memory_size";
public const string VersionNumber = "version_number";
public const string IsContinuousControl = "is_continuous_control";
public const string ActionOutputShape = "action_output_shape";
public const string ActionOutput = "action";
}
}
| 30 |
ml-agents | openai | C# | using System;
using System.Collections.Generic;
using Barracuda;
using MLAgents.InferenceBrain.Utils;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// Tensor - A class to encapsulate a Tensor used for inference.
///
/// This class holds the data array, the shape, the type and the name of the corresponding
/// placeholder in the execution graph. All the fields are editable in the inspector,
/// allowing the user to specify everything but the data in a graphical way.
/// </summary>
[Serializable]
public class TensorProxy
{
public enum TensorType
{
Integer,
FloatingPoint
};
private static readonly Dictionary<TensorType, Type> k_TypeMap =
new Dictionary<TensorType, Type>()
{
{TensorType.FloatingPoint, typeof(float)},
{TensorType.Integer, typeof(int)}
};
public string name;
public TensorType valueType;
// Since Type is not serializable, we use the DisplayType for the Inspector
public Type DataType => k_TypeMap[valueType];
public long[] shape;
public Tensor data;
}
public static class TensorUtils
{
public static void ResizeTensor(TensorProxy tensor, int batch, ITensorAllocator allocator)
{
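// Nothing to do when the tensor already holds an allocated buffer of the requested batch size.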
if (tensor.shape[0] == batch &&
tensor.data != null && tensor.data.batch == batch)
{
return;
}
tensor.data?.Dispose();
tensor.shape[0] = batch;
if (tensor.shape.Length == 4)
{
tensor.data = allocator.Alloc(
new TensorShape(
batch,
(int)tensor.shape[1],
(int)tensor.shape[2],
(int)tensor.shape[3]));
}
else
{
tensor.data = allocator.Alloc(
new TensorShape(
batch,
(int)tensor.shape[tensor.shape.Length - 1]));
}
}
internal static long[] TensorShapeFromBarracuda(TensorShape src)
{
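// Barracuda shapes are laid out as [batch, height, width, channels]; a 1x1 spatial extent
// indicates a flat vector tensor, so it is collapsed to [batch, channels].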
if (src.height == 1 && src.width == 1)
{
return new long[] {src.batch, src.channels};
}
return new long[] {src.batch, src.height, src.width, src.channels};
}
public static TensorProxy TensorProxyFromBarracuda(Tensor src, string nameOverride = null)
{
var shape = TensorShapeFromBarracuda(src.shape);
return new TensorProxy
{
name = nameOverride ?? src.name,
valueType = TensorProxy.TensorType.FloatingPoint,
shape = shape,
data = src
};
}
/// <summary>
/// Fill a pre-allocated Tensor with random numbers
/// </summary>
/// <param name="tensorProxy">The pre-allocated Tensor to fill</param>
/// <param name="randomNormal">RandomNormal object used to populate tensor</param>
/// <exception cref="NotImplementedException">
/// Throws when trying to fill a Tensor of type other than float
/// </exception>
/// <exception cref="ArgumentNullException">
/// Throws when the Tensor is not allocated
/// </exception>
public static void FillTensorWithRandomNormal(
TensorProxy tensorProxy, RandomNormal randomNormal)
{
if (tensorProxy.DataType != typeof(float))
{
throw new NotImplementedException("Only float data types are currently supported");
}
if (tensorProxy.data == null)
{
throw new ArgumentNullException();
}
for (var i = 0; i < tensorProxy.data.length; i++)
{
tensorProxy.data[i] = (float)randomNormal.NextDouble();
}
}
}
}
| 124 |
ml-agents | openai | C# | namespace MLAgents.InferenceBrain.Utils
{
/// <summary>
/// Multinomial - Draws samples from a multinomial distribution given a (potentially unscaled)
/// cumulative mass function (CMF). This means that the CMF need not "end" with probability
/// mass of 1.0. For instance: [0.1, 0.2, 0.5] is a valid (unscaled) CMF. What is important is
/// that it is a cumulative function, not a probability function. In other words,
/// entry[i] = P(x \le i), NOT P(i - 1 \le x \lt i).
/// (\le stands for less than or equal to while \lt is strictly less than).
/// </summary>
public class Multinomial
{
private readonly System.Random m_Random;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="seed">
/// Seed for the random number generator used in the sampling process.
/// </param>
public Multinomial(int seed)
{
m_Random = new System.Random(seed);
}
/// <summary>
/// Samples from the Multinomial distribution defined by the provided cumulative
/// mass function.
/// </summary>
/// <param name="cmf">
/// Cumulative mass function, which may be unscaled. The entries in this array need
/// to be monotonic (always increasing). If the CMF is scaled, then the last entry in
/// the array will be 1.0.
/// </param>
/// <returns>A sampled index from the CMF ranging from 0 to cmf.Length-1.</returns>
public int Sample(float[] cmf)
{
var p = (float)m_Random.NextDouble() * cmf[cmf.Length - 1];
var cls = 0;
while (cmf[cls] < p)
{
++cls;
}
return cls;
}
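// Example (illustrative, not from the original source): with cmf = {0.1f, 0.3f, 0.9f},
// p is drawn uniformly from [0, 0.9); a draw of p = 0.25 skips index 0 (0.1 < 0.25) and
// stops at index 1 (0.3 >= 0.25), so Sample returns 1. The resulting class probabilities
// are 1/9, 2/9 and 6/9.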
}
}
| 49 |
ml-agents | openai | C# | using System;
namespace MLAgents.InferenceBrain.Utils
{
/// <summary>
/// RandomNormal - A random number generator that produces normally distributed random
/// numbers using the Marsaglia polar method:
/// https://en.wikipedia.org/wiki/Marsaglia_polar_method
/// TODO: worth overriding System.Random instead of aggregating?
/// </summary>
public class RandomNormal
{
private readonly double m_Mean;
private readonly double m_Stddev;
private readonly Random m_Random;
public RandomNormal(int seed, float mean = 0.0f, float stddev = 1.0f)
{
m_Mean = mean;
m_Stddev = stddev;
m_Random = new Random(seed);
}
// Each iteration produces two numbers. Hold one here for next call
private bool m_HasSpare;
private double m_SpareUnscaled;
/// <summary>
/// Return the next random double number
/// </summary>
/// <returns>Next random double number</returns>
public double NextDouble()
{
if (m_HasSpare)
{
m_HasSpare = false;
return m_SpareUnscaled * m_Stddev + m_Mean;
}
double u, v, s;
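// Marsaglia polar method: rejection-sample a point (u, v) uniformly inside the unit
// circle (excluding the origin) so that Math.Log(s) below is finite and well defined.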
do
{
u = m_Random.NextDouble() * 2.0 - 1.0;
v = m_Random.NextDouble() * 2.0 - 1.0;
s = u * u + v * v;
}
while (s >= 1.0 || Math.Abs(s) < double.Epsilon);
s = Math.Sqrt(-2.0 * Math.Log(s) / s);
m_SpareUnscaled = u * s;
m_HasSpare = true;
return v * s * m_Stddev + m_Mean;
}
}
}
| 57 |
orrb | openai | C# | using System;
using System.Linq.Expressions;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
public class BaseEditor<T> : Editor
where T : MonoBehaviour
{
protected T m_Target
{
get { return (T)target; }
}
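// Resolves a SerializedProperty from a strongly typed member expression, e.g.
// FindProperty(x => x.postProcessLayer); RuntimeUtilities.GetFieldPath converts the
// lambda into the corresponding serialized property path.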
protected SerializedProperty FindProperty<TValue>(Expression<Func<T, TValue>> expr)
{
return serializedObject.FindProperty(RuntimeUtilities.GetFieldPath(expr));
}
}
}
| 22 |
orrb | openai | C# | using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Assertions;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
public sealed class EffectListEditor
{
Editor m_BaseEditor;
PostProcessProfile m_Asset;
SerializedObject m_SerializedObject;
SerializedProperty m_SettingsProperty;
Dictionary<Type, Type> m_EditorTypes; // SettingsType => EditorType
List<PostProcessEffectBaseEditor> m_Editors;
public EffectListEditor(Editor editor)
{
Assert.IsNotNull(editor);
m_BaseEditor = editor;
}
public void Init(PostProcessProfile asset, SerializedObject serializedObject)
{
Assert.IsNotNull(asset);
Assert.IsNotNull(serializedObject);
m_Asset = asset;
m_SerializedObject = serializedObject;
m_SettingsProperty = serializedObject.FindProperty("settings");
Assert.IsNotNull(m_SettingsProperty);
m_EditorTypes = new Dictionary<Type, Type>();
m_Editors = new List<PostProcessEffectBaseEditor>();
// Gets the list of all available postfx editors
var editorTypes = RuntimeUtilities.GetAllAssemblyTypes()
.Where(
t => t.IsSubclassOf(typeof(PostProcessEffectBaseEditor))
&& t.IsDefined(typeof(PostProcessEditorAttribute), false)
&& !t.IsAbstract
);
// Map them to their corresponding settings type
foreach (var editorType in editorTypes)
{
var attribute = editorType.GetAttribute<PostProcessEditorAttribute>();
m_EditorTypes.Add(attribute.settingsType, editorType);
}
// Create editors for existing settings
for (int i = 0; i < m_Asset.settings.Count; i++)
CreateEditor(m_Asset.settings[i], m_SettingsProperty.GetArrayElementAtIndex(i));
// Keep track of undo/redo to redraw the inspector when that happens
Undo.undoRedoPerformed += OnUndoRedoPerformed;
}
void OnUndoRedoPerformed()
{
m_Asset.isDirty = true;
// Dumb hack to make sure the serialized object is up to date on undo (else there'll be
// a state mismatch when this class is used in a GameObject inspector).
m_SerializedObject.Update();
m_SerializedObject.ApplyModifiedProperties();
// Seems like there's an issue with the inspector not repainting after some undo events
// This will take care of that
m_BaseEditor.Repaint();
}
void CreateEditor(PostProcessEffectSettings settings, SerializedProperty property, int index = -1)
{
var settingsType = settings.GetType();
Type editorType;
if (!m_EditorTypes.TryGetValue(settingsType, out editorType))
editorType = typeof(DefaultPostProcessEffectEditor);
var editor = (PostProcessEffectBaseEditor)Activator.CreateInstance(editorType);
editor.Init(settings, m_BaseEditor);
editor.baseProperty = property.Copy();
if (index < 0)
m_Editors.Add(editor);
else
m_Editors[index] = editor;
}
// Clears & recreate all editors - mainly used when the volume has been modified outside of
// the editor (user scripts, inspector reset etc).
void RefreshEditors()
{
// Disable all editors first
foreach (var editor in m_Editors)
editor.OnDisable();
// Remove them
m_Editors.Clear();
// Recreate editors for existing settings, if any
for (int i = 0; i < m_Asset.settings.Count; i++)
CreateEditor(m_Asset.settings[i], m_SettingsProperty.GetArrayElementAtIndex(i));
}
public void Clear()
{
if (m_Editors == null)
return; // Hasn't been inited yet
foreach (var editor in m_Editors)
editor.OnDisable();
m_Editors.Clear();
m_EditorTypes.Clear();
Undo.undoRedoPerformed -= OnUndoRedoPerformed;
}
public void OnGUI()
{
if (m_Asset == null)
return;
if (m_Asset.isDirty)
{
RefreshEditors();
m_Asset.isDirty = false;
}
bool isEditable = !VersionControl.Provider.isActive
|| AssetDatabase.IsOpenForEdit(m_Asset, StatusQueryOptions.UseCachedIfPossible);
using (new EditorGUI.DisabledScope(!isEditable))
{
EditorGUILayout.LabelField(EditorUtilities.GetContent("Overrides"), EditorStyles.boldLabel);
// Override list
for (int i = 0; i < m_Editors.Count; i++)
{
var editor = m_Editors[i];
string title = editor.GetDisplayTitle();
int id = i; // Needed for closure capture below
EditorUtilities.DrawSplitter();
bool displayContent = EditorUtilities.DrawHeader(
title,
editor.baseProperty,
editor.activeProperty,
editor.target,
() => ResetEffectOverride(editor.target.GetType(), id),
() => RemoveEffectOverride(id)
);
if (displayContent)
{
using (new EditorGUI.DisabledScope(!editor.activeProperty.boolValue))
editor.OnInternalInspectorGUI();
}
}
if (m_Editors.Count > 0)
{
EditorUtilities.DrawSplitter();
EditorGUILayout.Space();
}
else
{
EditorGUILayout.HelpBox("No override set on this volume.", MessageType.Info);
}
if (GUILayout.Button("Add effect...", EditorStyles.miniButton))
{
var menu = new GenericMenu();
var typeMap = PostProcessManager.instance.settingsTypes;
foreach (var kvp in typeMap)
{
var type = kvp.Key;
var title = EditorUtilities.GetContent(kvp.Value.menuItem);
bool exists = m_Asset.HasSettings(type);
if (!exists)
menu.AddItem(title, false, () => AddEffectOverride(type));
else
menu.AddDisabledItem(title);
}
menu.ShowAsContext();
}
EditorGUILayout.Space();
}
}
void AddEffectOverride(Type type)
{
m_SerializedObject.Update();
var effect = CreateNewEffect(type);
Undo.RegisterCreatedObjectUndo(effect, "Add Effect Override");
// Store this new effect as a subasset so we can reference it safely afterwards
AssetDatabase.AddObjectToAsset(effect, m_Asset);
// Grow the list first, then add - that's how serialized lists work in Unity
m_SettingsProperty.arraySize++;
var effectProp = m_SettingsProperty.GetArrayElementAtIndex(m_SettingsProperty.arraySize - 1);
effectProp.objectReferenceValue = effect;
// Force save / refresh
EditorUtility.SetDirty(m_Asset);
AssetDatabase.SaveAssets();
// Create & store the internal editor object for this effect
CreateEditor(effect, effectProp);
m_SerializedObject.ApplyModifiedProperties();
}
void RemoveEffectOverride(int id)
{
// Huh. Hack to keep foldout state on the next element...
bool nextFoldoutState = false;
if (id < m_Editors.Count - 1)
nextFoldoutState = m_Editors[id + 1].baseProperty.isExpanded;
// Remove from the cached editors list
m_Editors[id].OnDisable();
m_Editors.RemoveAt(id);
m_SerializedObject.Update();
var property = m_SettingsProperty.GetArrayElementAtIndex(id);
var effect = property.objectReferenceValue;
// Unassign it (should be null already but serialization does funky things)
property.objectReferenceValue = null;
// ...and remove the array index itself from the list
m_SettingsProperty.DeleteArrayElementAtIndex(id);
// Finally refresh editor reference to the serialized settings list
for (int i = 0; i < m_Editors.Count; i++)
m_Editors[i].baseProperty = m_SettingsProperty.GetArrayElementAtIndex(i).Copy();
if (id < m_Editors.Count)
m_Editors[id].baseProperty.isExpanded = nextFoldoutState;
m_SerializedObject.ApplyModifiedProperties();
// Destroy the setting object after ApplyModifiedProperties(). If we do it before, redo
// actions will be in the wrong order and the reference to the setting object in the
// list will be lost.
Undo.DestroyObjectImmediate(effect);
// Force save / refresh
EditorUtility.SetDirty(m_Asset);
AssetDatabase.SaveAssets();
}
// Reset is done by deleting the object, removing it from the list and adding a new one
// in its place
void ResetEffectOverride(Type type, int id)
{
// Remove from the cached editors list
m_Editors[id].OnDisable();
m_Editors[id] = null;
m_SerializedObject.Update();
var property = m_SettingsProperty.GetArrayElementAtIndex(id);
var prevSettings = property.objectReferenceValue;
// Unassign it but don't remove it from the array to keep the index available
property.objectReferenceValue = null;
// Create a new object
var newEffect = CreateNewEffect(type);
Undo.RegisterCreatedObjectUndo(newEffect, "Reset Effect Override");
// Store this new effect as a subasset so we can reference it safely afterwards
AssetDatabase.AddObjectToAsset(newEffect, m_Asset);
// Put it in the reserved space
property.objectReferenceValue = newEffect;
// Create & store the internal editor object for this effect
CreateEditor(newEffect, property, id);
m_SerializedObject.ApplyModifiedProperties();
// Same as RemoveEffectOverride, destroy at the end so it's recreated first on Undo to
// make sure the GUID exists before undoing the list state
Undo.DestroyObjectImmediate(prevSettings);
// Force save / refresh
EditorUtility.SetDirty(m_Asset);
AssetDatabase.SaveAssets();
}
PostProcessEffectSettings CreateNewEffect(Type type)
{
var effect = (PostProcessEffectSettings)ScriptableObject.CreateInstance(type);
effect.hideFlags = HideFlags.HideInInspector | HideFlags.HideInHierarchy;
effect.name = type.Name;
effect.enabled.value = true;
return effect;
}
}
}
| 317 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[CustomEditor(typeof(PostProcessDebug))]
public sealed class PostProcessDebugEditor : BaseEditor<PostProcessDebug>
{
SerializedProperty m_PostProcessLayer;
SerializedProperty m_LightMeterEnabled;
SerializedProperty m_HistogramEnabled;
SerializedProperty m_WaveformEnabled;
SerializedProperty m_VectorscopeEnabled;
SerializedProperty m_Overlay;
SerializedObject m_LayerObject;
SerializedProperty m_LightMeterShowCurves;
SerializedProperty m_HistogramChannel;
SerializedProperty m_WaveformExposure;
SerializedProperty m_VectorscopeExposure;
SerializedProperty m_MotionColorIntensity;
SerializedProperty m_MotionGridSize;
SerializedProperty m_ColorBlindness;
SerializedProperty m_ColorBlindnessStrength;
void OnEnable()
{
m_PostProcessLayer = FindProperty(x => x.postProcessLayer);
m_LightMeterEnabled = FindProperty(x => x.lightMeter);
m_HistogramEnabled = FindProperty(x => x.histogram);
m_WaveformEnabled = FindProperty(x => x.waveform);
m_VectorscopeEnabled = FindProperty(x => x.vectorscope);
m_Overlay = FindProperty(x => x.debugOverlay);
if (m_PostProcessLayer.objectReferenceValue != null)
RebuildProperties();
}
void RebuildProperties()
{
if (m_PostProcessLayer.objectReferenceValue == null)
return;
m_LayerObject = new SerializedObject(m_Target.postProcessLayer);
m_LightMeterShowCurves = m_LayerObject.FindProperty("debugLayer.lightMeter.showCurves");
m_HistogramChannel = m_LayerObject.FindProperty("debugLayer.histogram.channel");
m_WaveformExposure = m_LayerObject.FindProperty("debugLayer.waveform.exposure");
m_VectorscopeExposure = m_LayerObject.FindProperty("debugLayer.vectorscope.exposure");
m_MotionColorIntensity = m_LayerObject.FindProperty("debugLayer.overlaySettings.motionColorIntensity");
m_MotionGridSize = m_LayerObject.FindProperty("debugLayer.overlaySettings.motionGridSize");
m_ColorBlindness = m_LayerObject.FindProperty("debugLayer.overlaySettings.colorBlindnessType");
m_ColorBlindnessStrength = m_LayerObject.FindProperty("debugLayer.overlaySettings.colorBlindnessStrength");
}
public override void OnInspectorGUI()
{
serializedObject.Update();
using (var changed = new EditorGUI.ChangeCheckScope())
{
EditorGUILayout.PropertyField(m_PostProcessLayer);
serializedObject.ApplyModifiedProperties(); // Needed to rebuild properties after a change
serializedObject.Update();
if (changed.changed)
RebuildProperties();
}
if (m_PostProcessLayer.objectReferenceValue != null)
{
m_LayerObject.Update();
// Overlays
EditorGUILayout.Space();
EditorGUILayout.LabelField(EditorUtilities.GetContent("Overlay"), EditorStyles.boldLabel);
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_Overlay);
DoOverlayGUI(DebugOverlay.MotionVectors, m_MotionColorIntensity, m_MotionGridSize);
DoOverlayGUI(DebugOverlay.ColorBlindnessSimulation, m_ColorBlindness, m_ColorBlindnessStrength);
// Special cases
if (m_Overlay.intValue == (int)DebugOverlay.NANTracker && m_Target.postProcessLayer.stopNaNPropagation)
EditorGUILayout.HelpBox("Disable \"Stop NaN Propagation\" in the Post-process layer or NaNs will be overwritten!", MessageType.Warning);
EditorGUI.indentLevel--;
// Monitors
EditorGUILayout.Space();
EditorGUILayout.LabelField(EditorUtilities.GetContent("Monitors"), EditorStyles.boldLabel);
EditorGUI.indentLevel++;
DoMonitorGUI(EditorUtilities.GetContent("Light Meter"), m_LightMeterEnabled, m_LightMeterShowCurves);
DoMonitorGUI(EditorUtilities.GetContent("Histogram"), m_HistogramEnabled, m_HistogramChannel);
DoMonitorGUI(EditorUtilities.GetContent("Waveform"), m_WaveformEnabled, m_WaveformExposure);
DoMonitorGUI(EditorUtilities.GetContent("Vectoscope"), m_VectorscopeEnabled, m_VectorscopeExposure);
EditorGUI.indentLevel--;
m_LayerObject.ApplyModifiedProperties();
}
serializedObject.ApplyModifiedProperties();
}
void DoMonitorGUI(GUIContent content, SerializedProperty prop, params SerializedProperty[] settings)
{
EditorGUILayout.PropertyField(prop, content);
if (settings == null || settings.Length == 0)
return;
if (prop.boolValue)
{
EditorGUI.indentLevel++;
foreach (var p in settings)
EditorGUILayout.PropertyField(p);
EditorGUI.indentLevel--;
}
}
void DoOverlayGUI(DebugOverlay overlay, params SerializedProperty[] settings)
{
if (m_Overlay.intValue != (int)overlay)
return;
if (settings == null || settings.Length == 0)
return;
foreach (var p in settings)
EditorGUILayout.PropertyField(p);
}
}
}
| 136 |
orrb | openai | C# | using System;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
public class PostProcessEffectBaseEditor
{
internal PostProcessEffectSettings target { get; private set; }
internal SerializedObject serializedObject { get; private set; }
internal SerializedProperty baseProperty;
internal SerializedProperty activeProperty;
SerializedProperty m_Enabled;
Editor m_Inspector;
internal PostProcessEffectBaseEditor()
{
}
public void Repaint()
{
m_Inspector.Repaint();
}
internal void Init(PostProcessEffectSettings target, Editor inspector)
{
this.target = target;
m_Inspector = inspector;
serializedObject = new SerializedObject(target);
m_Enabled = serializedObject.FindProperty("enabled.value");
activeProperty = serializedObject.FindProperty("active");
OnEnable();
}
public virtual void OnEnable()
{
}
public virtual void OnDisable()
{
}
internal void OnInternalInspectorGUI()
{
serializedObject.Update();
TopRowFields();
OnInspectorGUI();
EditorGUILayout.Space();
serializedObject.ApplyModifiedProperties();
}
public virtual void OnInspectorGUI()
{
}
public virtual string GetDisplayTitle()
{
return ObjectNames.NicifyVariableName(target.GetType().Name);
}
void TopRowFields()
{
using (new EditorGUILayout.HorizontalScope())
{
if (GUILayout.Button(EditorUtilities.GetContent("All|Toggle all overrides on. To maximize performances you should only toggle overrides that you actually need."), Styling.miniLabelButton, GUILayout.Width(17f), GUILayout.ExpandWidth(false)))
SetAllOverridesTo(true);
if (GUILayout.Button(EditorUtilities.GetContent("None|Toggle all overrides off."), Styling.miniLabelButton, GUILayout.Width(32f), GUILayout.ExpandWidth(false)))
SetAllOverridesTo(false);
GUILayout.FlexibleSpace();
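// The On/Off pair is drawn as two mutually exclusive toggle buttons; clicking either
// one flips the shared "enabled" state written back to the serialized property.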
bool enabled = m_Enabled.boolValue;
enabled = GUILayout.Toggle(enabled, EditorUtilities.GetContent("On|Enable this effect."), EditorStyles.miniButtonLeft, GUILayout.Width(35f), GUILayout.ExpandWidth(false));
enabled = !GUILayout.Toggle(!enabled, EditorUtilities.GetContent("Off|Disable this effect."), EditorStyles.miniButtonRight, GUILayout.Width(35f), GUILayout.ExpandWidth(false));
m_Enabled.boolValue = enabled;
}
}
void SetAllOverridesTo(bool state)
{
Undo.RecordObject(target, "Toggle All");
target.SetAllOverridesTo(state);
serializedObject.Update();
}
protected void PropertyField(SerializedParameterOverride property)
{
var title = EditorUtilities.GetContent(property.displayName);
PropertyField(property, title);
}
protected void PropertyField(SerializedParameterOverride property, GUIContent title)
{
// Check for DisplayNameAttribute first
var displayNameAttr = property.GetAttribute<DisplayNameAttribute>();
if (displayNameAttr != null)
title.text = displayNameAttr.displayName;
// Add tooltip if it's missing and an attribute is available
if (string.IsNullOrEmpty(title.tooltip))
{
var tooltipAttr = property.GetAttribute<TooltipAttribute>();
if (tooltipAttr != null)
title.tooltip = tooltipAttr.tooltip;
}
// Look for a compatible attribute decorator
AttributeDecorator decorator = null;
Attribute attribute = null;
foreach (var attr in property.attributes)
{
// Use the first decorator we found
if (decorator == null)
{
decorator = EditorUtilities.GetDecorator(attr.GetType());
attribute = attr;
}
// Draw unity built-in Decorators (Space, Header)
if (attr is PropertyAttribute)
{
if (attr is SpaceAttribute)
{
EditorGUILayout.GetControlRect(false, (attr as SpaceAttribute).height);
}
else if (attr is HeaderAttribute)
{
var rect = EditorGUILayout.GetControlRect(false, 24f);
rect.y += 8f;
rect = EditorGUI.IndentedRect(rect);
EditorGUI.LabelField(rect, (attr as HeaderAttribute).header, Styling.labelHeader);
}
}
}
bool invalidProp = false;
if (decorator != null && !decorator.IsAutoProperty())
{
if (decorator.OnGUI(property.value, property.overrideState, title, attribute))
return;
// Attribute is invalid for the specified property; use default unity field instead
invalidProp = true;
}
using (new EditorGUILayout.HorizontalScope())
{
// Override checkbox
var overrideRect = GUILayoutUtility.GetRect(17f, 17f, GUILayout.ExpandWidth(false));
overrideRect.yMin += 4f;
EditorUtilities.DrawOverrideCheckbox(overrideRect, property.overrideState);
// Property
using (new EditorGUI.DisabledScope(!property.overrideState.boolValue))
{
if (decorator != null && !invalidProp)
{
if (decorator.OnGUI(property.value, property.overrideState, title, attribute))
return;
}
// Default unity field
if (property.value.hasVisibleChildren
&& property.value.propertyType != SerializedPropertyType.Vector2
&& property.value.propertyType != SerializedPropertyType.Vector3)
{
GUILayout.Space(12f);
EditorGUILayout.PropertyField(property.value, title, true);
}
else
{
EditorGUILayout.PropertyField(property.value, title);
}
}
}
}
}
}
| 184 |
orrb | openai | C# | using System;
using System.Linq.Expressions;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
public class PostProcessEffectEditor<T> : PostProcessEffectBaseEditor
where T : PostProcessEffectSettings
{
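// Helpers that resolve serialized properties from strongly typed lambdas
// (e.g. x => x.intensity) instead of string-based property paths.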
protected SerializedProperty FindProperty<TValue>(Expression<Func<T, TValue>> expr)
{
return serializedObject.FindProperty(RuntimeUtilities.GetFieldPath(expr));
}
protected SerializedParameterOverride FindParameterOverride<TValue>(Expression<Func<T, TValue>> expr)
{
var property = serializedObject.FindProperty(RuntimeUtilities.GetFieldPath(expr));
var attributes = RuntimeUtilities.GetMemberAttributes(expr);
return new SerializedParameterOverride(property, attributes);
}
}
}
| 23 |
orrb | openai | C# | using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
using UnityEditorInternal;
using System.IO;
namespace UnityEditor.Rendering.PostProcessing
{
using SerializedBundleRef = PostProcessLayer.SerializedBundleRef;
using EXRFlags = Texture2D.EXRFlags;
[CanEditMultipleObjects, CustomEditor(typeof(PostProcessLayer))]
public sealed class PostProcessLayerEditor : BaseEditor<PostProcessLayer>
{
SerializedProperty m_StopNaNPropagation;
SerializedProperty m_VolumeTrigger;
SerializedProperty m_VolumeLayer;
SerializedProperty m_AntialiasingMode;
SerializedProperty m_TaaJitterSpread;
SerializedProperty m_TaaSharpness;
SerializedProperty m_TaaStationaryBlending;
SerializedProperty m_TaaMotionBlending;
SerializedProperty m_FxaaMobileOptimized;
SerializedProperty m_FxaaKeepAlpha;
SerializedProperty m_FogEnabled;
SerializedProperty m_FogExcludeSkybox;
SerializedProperty m_ShowToolkit;
SerializedProperty m_ShowCustomSorter;
Dictionary<PostProcessEvent, ReorderableList> m_CustomLists;
static GUIContent[] s_AntialiasingMethodNames =
{
new GUIContent("No Anti-aliasing"),
new GUIContent("Fast Approximate Anti-aliasing (FXAA)"),
new GUIContent("Subpixel Morphological Anti-aliasing (SMAA)"),
new GUIContent("Temporal Anti-aliasing (TAA)")
};
enum ExportMode
{
FullFrame,
DisablePost,
BreakBeforeColorGradingLinear,
BreakBeforeColorGradingLog
}
void OnEnable()
{
m_StopNaNPropagation = FindProperty(x => x.stopNaNPropagation);
m_VolumeTrigger = FindProperty(x => x.volumeTrigger);
m_VolumeLayer = FindProperty(x => x.volumeLayer);
m_AntialiasingMode = FindProperty(x => x.antialiasingMode);
m_TaaJitterSpread = FindProperty(x => x.temporalAntialiasing.jitterSpread);
m_TaaSharpness = FindProperty(x => x.temporalAntialiasing.sharpness);
m_TaaStationaryBlending = FindProperty(x => x.temporalAntialiasing.stationaryBlending);
m_TaaMotionBlending = FindProperty(x => x.temporalAntialiasing.motionBlending);
m_FxaaMobileOptimized = FindProperty(x => x.fastApproximateAntialiasing.fastMode);
m_FxaaKeepAlpha = FindProperty(x => x.fastApproximateAntialiasing.keepAlpha);
m_FogEnabled = FindProperty(x => x.fog.enabled);
m_FogExcludeSkybox = FindProperty(x => x.fog.excludeSkybox);
m_ShowToolkit = serializedObject.FindProperty("m_ShowToolkit");
m_ShowCustomSorter = serializedObject.FindProperty("m_ShowCustomSorter");
}
void OnDisable()
{
m_CustomLists = null;
}
public override void OnInspectorGUI()
{
serializedObject.Update();
var camera = m_Target.GetComponent<Camera>();
#if !UNITY_2017_2_OR_NEWER
if (RuntimeUtilities.isSinglePassStereoSelected)
EditorGUILayout.HelpBox("Unity 2017.2+ required for full Single-pass stereo rendering support.", MessageType.Warning);
#endif
DoVolumeBlending();
DoAntialiasing();
DoFog(camera);
EditorGUILayout.PropertyField(m_StopNaNPropagation, EditorUtilities.GetContent("Stop NaN Propagation|Automatically replaces NaN/Inf in shaders with a black pixel to avoid breaking some effects. This will slightly affect performance and should only be used if you experience NaN issues that you can't fix. Has no effect on GLES2 platforms."));
EditorGUILayout.Space();
DoToolkit();
DoCustomEffectSorter();
EditorUtilities.DrawSplitter();
EditorGUILayout.Space();
serializedObject.ApplyModifiedProperties();
}
void DoVolumeBlending()
{
EditorGUILayout.LabelField(EditorUtilities.GetContent("Volume blending"), EditorStyles.boldLabel);
EditorGUI.indentLevel++;
{
// The layout system tends to break alignment when mixing inspector fields with
// custom laid-out fields, so do the layout manually instead
var indentOffset = EditorGUI.indentLevel * 15f;
var lineRect = GUILayoutUtility.GetRect(1, EditorGUIUtility.singleLineHeight);
var labelRect = new Rect(lineRect.x, lineRect.y, EditorGUIUtility.labelWidth - indentOffset, lineRect.height);
var fieldRect = new Rect(labelRect.xMax, lineRect.y, lineRect.width - labelRect.width - 60f, lineRect.height);
var buttonRect = new Rect(fieldRect.xMax, lineRect.y, 60f, lineRect.height);
EditorGUI.PrefixLabel(labelRect, EditorUtilities.GetContent("Trigger|A transform that will act as a trigger for volume blending."));
m_VolumeTrigger.objectReferenceValue = (Transform)EditorGUI.ObjectField(fieldRect, m_VolumeTrigger.objectReferenceValue, typeof(Transform), true);
if (GUI.Button(buttonRect, EditorUtilities.GetContent("This|Assigns the current GameObject as a trigger."), EditorStyles.miniButton))
m_VolumeTrigger.objectReferenceValue = m_Target.transform;
if (m_VolumeTrigger.objectReferenceValue == null)
EditorGUILayout.HelpBox("No trigger has been set, the camera will only be affected by global volumes.", MessageType.Info);
EditorGUILayout.PropertyField(m_VolumeLayer, EditorUtilities.GetContent("Layer|This camera will only be affected by volumes in the selected scene-layers."));
int mask = m_VolumeLayer.intValue;
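// -1 means "Everything"; bit 0 is the built-in "Default" layer.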
if (mask == 0)
EditorGUILayout.HelpBox("No layer has been set, the trigger will never be affected by volumes.", MessageType.Warning);
else if (mask == -1 || ((mask & 1) != 0))
EditorGUILayout.HelpBox("Do not use \"Everything\" or \"Default\" as a layer mask as it will slow down the volume blending process! Put post-processing volumes in their own dedicated layer for best performances.", MessageType.Warning);
}
EditorGUI.indentLevel--;
EditorGUILayout.Space();
}
void DoAntialiasing()
{
EditorGUILayout.LabelField(EditorUtilities.GetContent("Anti-aliasing"), EditorStyles.boldLabel);
EditorGUI.indentLevel++;
{
m_AntialiasingMode.intValue = EditorGUILayout.Popup(EditorUtilities.GetContent("Mode|The anti-aliasing method to use. FXAA is fast but low quality. SMAA works well for non-HDR scenes. TAA is a bit slower but higher quality and works well with HDR."), m_AntialiasingMode.intValue, s_AntialiasingMethodNames);
if (m_AntialiasingMode.intValue == (int)PostProcessLayer.Antialiasing.TemporalAntialiasing)
{
#if !UNITY_2017_3_OR_NEWER
if (RuntimeUtilities.isSinglePassStereoSelected)
EditorGUILayout.HelpBox("TAA requires Unity 2017.3+ for Single-pass stereo rendering support.", MessageType.Warning);
#endif
EditorGUILayout.PropertyField(m_TaaJitterSpread);
EditorGUILayout.PropertyField(m_TaaStationaryBlending);
EditorGUILayout.PropertyField(m_TaaMotionBlending);
EditorGUILayout.PropertyField(m_TaaSharpness);
}
else if (m_AntialiasingMode.intValue == (int)PostProcessLayer.Antialiasing.SubpixelMorphologicalAntialiasing)
{
if (RuntimeUtilities.isSinglePassStereoSelected)
EditorGUILayout.HelpBox("SMAA doesn't work with Single-pass stereo rendering.", MessageType.Warning);
}
else if (m_AntialiasingMode.intValue == (int)PostProcessLayer.Antialiasing.FastApproximateAntialiasing)
{
EditorGUILayout.PropertyField(m_FxaaMobileOptimized);
EditorGUILayout.PropertyField(m_FxaaKeepAlpha);
}
}
EditorGUI.indentLevel--;
EditorGUILayout.Space();
}
void DoFog(Camera camera)
{
if (camera == null || camera.actualRenderingPath != RenderingPath.DeferredShading)
return;
EditorGUILayout.LabelField(EditorUtilities.GetContent("Deferred Fog"), EditorStyles.boldLabel);
EditorGUI.indentLevel++;
{
EditorGUILayout.PropertyField(m_FogEnabled);
if (m_FogEnabled.boolValue)
{
EditorGUILayout.PropertyField(m_FogExcludeSkybox);
EditorGUILayout.HelpBox("This adds fog compatibility to the deferred rendering path; actual fog settings should be set in the Lighting panel.", MessageType.Info);
}
}
EditorGUI.indentLevel--;
EditorGUILayout.Space();
}
void DoToolkit()
{
EditorUtilities.DrawSplitter();
m_ShowToolkit.boolValue = EditorUtilities.DrawHeader("Toolkit", m_ShowToolkit.boolValue);
if (m_ShowToolkit.boolValue)
{
GUILayout.Space(2);
if (GUILayout.Button(EditorUtilities.GetContent("Export frame to EXR..."), EditorStyles.miniButton))
{
var menu = new GenericMenu();
menu.AddItem(EditorUtilities.GetContent("Full Frame (as displayed)"), false, () => ExportFrameToExr(ExportMode.FullFrame));
menu.AddItem(EditorUtilities.GetContent("Disable post-processing"), false, () => ExportFrameToExr(ExportMode.DisablePost));
menu.AddItem(EditorUtilities.GetContent("Break before Color Grading (Linear)"), false, () => ExportFrameToExr(ExportMode.BreakBeforeColorGradingLinear));
menu.AddItem(EditorUtilities.GetContent("Break before Color Grading (Log)"), false, () => ExportFrameToExr(ExportMode.BreakBeforeColorGradingLog));
menu.ShowAsContext();
}
if (GUILayout.Button(EditorUtilities.GetContent("Select all layer volumes|Selects all the volumes that will influence this layer."), EditorStyles.miniButton))
{
var volumes = RuntimeUtilities.GetAllSceneObjects<PostProcessVolume>()
.Where(x => (m_VolumeLayer.intValue & (1 << x.gameObject.layer)) != 0)
.Select(x => x.gameObject)
.Cast<UnityEngine.Object>()
.ToArray();
if (volumes.Length > 0)
Selection.objects = volumes;
}
if (GUILayout.Button(EditorUtilities.GetContent("Select all active volumes|Selects all volumes currently affecting the layer."), EditorStyles.miniButton))
{
var volumes = new List<PostProcessVolume>();
PostProcessManager.instance.GetActiveVolumes(m_Target, volumes);
if (volumes.Count > 0)
{
Selection.objects = volumes
.Select(x => x.gameObject)
.Cast<UnityEngine.Object>()
.ToArray();
}
}
GUILayout.Space(3);
}
}
void DoCustomEffectSorter()
{
EditorUtilities.DrawSplitter();
m_ShowCustomSorter.boolValue = EditorUtilities.DrawHeader("Custom Effect Sorting", m_ShowCustomSorter.boolValue);
if (m_ShowCustomSorter.boolValue)
{
bool isInPrefab = false;
// Init lists if needed
if (m_CustomLists == null)
{
// In some cases the editor will refresh before components, which means
// components might not have been fully initialized yet. In this case we also
// need to make sure that we're not in a prefab, as sortedBundles isn't a
// serializable object and won't exist until the component is put in a scene.
if (m_Target.sortedBundles == null)
{
isInPrefab = string.IsNullOrEmpty(m_Target.gameObject.scene.name);
if (!isInPrefab)
{
// sortedBundles will be initialized and ready to use on the next frame
Repaint();
}
}
else
{
// Create a reorderable list for each injection event
m_CustomLists = new Dictionary<PostProcessEvent, ReorderableList>();
foreach (var evt in Enum.GetValues(typeof(PostProcessEvent)).Cast<PostProcessEvent>())
{
var bundles = m_Target.sortedBundles[evt];
var listName = ObjectNames.NicifyVariableName(evt.ToString());
var list = new ReorderableList(bundles, typeof(SerializedBundleRef), true, true, false, false);
list.drawHeaderCallback = (rect) =>
{
EditorGUI.LabelField(rect, listName);
};
list.drawElementCallback = (rect, index, isActive, isFocused) =>
{
var sbr = (SerializedBundleRef)list.list[index];
EditorGUI.LabelField(rect, sbr.bundle.attribute.menuItem);
};
list.onReorderCallback = (l) =>
{
EditorUtility.SetDirty(m_Target);
};
m_CustomLists.Add(evt, list);
}
}
}
GUILayout.Space(5);
if (isInPrefab)
{
EditorGUILayout.HelpBox("Not supported in prefabs.", MessageType.Info);
GUILayout.Space(3);
return;
}
bool anyList = false;
if (m_CustomLists != null)
{
foreach (var kvp in m_CustomLists)
{
var list = kvp.Value;
// Skip empty lists to avoid polluting the inspector
if (list.count == 0)
continue;
list.DoLayoutList();
anyList = true;
}
}
if (!anyList)
{
EditorGUILayout.HelpBox("No custom effect loaded.", MessageType.Info);
GUILayout.Space(3);
}
}
}
void ExportFrameToExr(ExportMode mode)
{
string path = EditorUtility.SaveFilePanel("Export EXR...", "", "Frame", "exr");
if (string.IsNullOrEmpty(path))
return;
EditorUtility.DisplayProgressBar("Export EXR", "Rendering...", 0f);
var camera = m_Target.GetComponent<Camera>();
var w = camera.pixelWidth;
var h = camera.pixelHeight;
var texOut = new Texture2D(w, h, TextureFormat.RGBAFloat, false, true);
var target = RenderTexture.GetTemporary(w, h, 24, RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);
var lastActive = RenderTexture.active;
var lastTargetSet = camera.targetTexture;
var lastPostFXState = m_Target.enabled;
var lastBreakColorGradingState = m_Target.breakBeforeColorGrading;
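// Temporarily reconfigure the post-process layer for the chosen export mode, render the
// camera into the temporary float target, then restore the previous state below.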
if (mode == ExportMode.DisablePost)
m_Target.enabled = false;
else if (mode == ExportMode.BreakBeforeColorGradingLinear || mode == ExportMode.BreakBeforeColorGradingLog)
m_Target.breakBeforeColorGrading = true;
camera.targetTexture = target;
camera.Render();
camera.targetTexture = lastTargetSet;
EditorUtility.DisplayProgressBar("Export EXR", "Reading...", 0.25f);
m_Target.enabled = lastPostFXState;
m_Target.breakBeforeColorGrading = lastBreakColorGradingState;
if (mode == ExportMode.BreakBeforeColorGradingLog)
{
// Convert to log
var material = new Material(Shader.Find("Hidden/PostProcessing/Editor/ConvertToLog"));
var newTarget = RenderTexture.GetTemporary(w, h, 0, RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);
Graphics.Blit(target, newTarget, material, 0);
RenderTexture.ReleaseTemporary(target);
DestroyImmediate(material);
target = newTarget;
}
RenderTexture.active = target;
texOut.ReadPixels(new Rect(0, 0, w, h), 0, 0);
texOut.Apply();
RenderTexture.active = lastActive;
EditorUtility.DisplayProgressBar("Export EXR", "Encoding...", 0.5f);
var bytes = texOut.EncodeToEXR(EXRFlags.OutputAsFloat | EXRFlags.CompressZIP);
EditorUtility.DisplayProgressBar("Export EXR", "Saving...", 0.75f);
File.WriteAllBytes(path, bytes);
EditorUtility.ClearProgressBar();
AssetDatabase.Refresh();
RenderTexture.ReleaseTemporary(target);
DestroyImmediate(texOut);
}
}
}
| 403 |
orrb | openai | C# | using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[CustomEditor(typeof(PostProcessProfile))]
sealed class PostProcessProfileEditor : Editor
{
EffectListEditor m_EffectList;
void OnEnable()
{
m_EffectList = new EffectListEditor(this);
m_EffectList.Init(target as PostProcessProfile, serializedObject);
}
void OnDisable()
{
if (m_EffectList != null)
m_EffectList.Clear();
}
public override void OnInspectorGUI()
{
serializedObject.Update();
m_EffectList.OnGUI();
serializedObject.ApplyModifiedProperties();
}
}
}
| 30 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[CanEditMultipleObjects, CustomEditor(typeof(PostProcessVolume))]
public sealed class PostProcessVolumeEditor : BaseEditor<PostProcessVolume>
{
SerializedProperty m_Profile;
SerializedProperty m_IsGlobal;
SerializedProperty m_BlendRadius;
SerializedProperty m_Weight;
SerializedProperty m_Priority;
EffectListEditor m_EffectList;
void OnEnable()
{
m_Profile = FindProperty(x => x.sharedProfile);
m_IsGlobal = FindProperty(x => x.isGlobal);
m_BlendRadius = FindProperty(x => x.blendDistance);
m_Weight = FindProperty(x => x.weight);
m_Priority = FindProperty(x => x.priority);
m_EffectList = new EffectListEditor(this);
RefreshEffectListEditor(m_Target.sharedProfile);
}
void OnDisable()
{
if (m_EffectList != null)
m_EffectList.Clear();
}
void RefreshEffectListEditor(PostProcessProfile asset)
{
m_EffectList.Clear();
if (asset != null)
m_EffectList.Init(asset, new SerializedObject(asset));
}
public override void OnInspectorGUI()
{
serializedObject.Update();
EditorGUILayout.PropertyField(m_IsGlobal);
if (!m_IsGlobal.boolValue) // Blend radius is not needed for global volumes
EditorGUILayout.PropertyField(m_BlendRadius);
EditorGUILayout.PropertyField(m_Weight);
EditorGUILayout.PropertyField(m_Priority);
bool assetHasChanged = false;
bool showCopy = m_Profile.objectReferenceValue != null;
bool multiEdit = m_Profile.hasMultipleDifferentValues;
// The layout system tends to break alignment when mixing inspector fields with custom
// laid-out fields, so do the layout manually instead
int buttonWidth = showCopy ? 45 : 60;
float indentOffset = EditorGUI.indentLevel * 15f;
var lineRect = GUILayoutUtility.GetRect(1, EditorGUIUtility.singleLineHeight);
var labelRect = new Rect(lineRect.x, lineRect.y, EditorGUIUtility.labelWidth - indentOffset, lineRect.height);
var fieldRect = new Rect(labelRect.xMax, lineRect.y, lineRect.width - labelRect.width - buttonWidth * (showCopy ? 2 : 1), lineRect.height);
var buttonNewRect = new Rect(fieldRect.xMax, lineRect.y, buttonWidth, lineRect.height);
var buttonCopyRect = new Rect(buttonNewRect.xMax, lineRect.y, buttonWidth, lineRect.height);
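// Row layout: prefix label | profile object field | "New" button | optional "Clone" button.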
EditorGUI.PrefixLabel(labelRect, EditorUtilities.GetContent("Profile|A reference to a profile asset."));
using (var scope = new EditorGUI.ChangeCheckScope())
{
EditorGUI.BeginProperty(fieldRect, GUIContent.none, m_Profile);
var profile = (PostProcessProfile)EditorGUI.ObjectField(fieldRect, m_Profile.objectReferenceValue, typeof(PostProcessProfile), false);
if (scope.changed)
{
assetHasChanged = true;
m_Profile.objectReferenceValue = profile;
}
EditorGUI.EndProperty();
}
using (new EditorGUI.DisabledScope(multiEdit))
{
if (GUI.Button(buttonNewRect, EditorUtilities.GetContent("New|Create a new profile."), showCopy ? EditorStyles.miniButtonLeft : EditorStyles.miniButton))
{
// By default, try to put assets in a folder next to the currently active
// scene file. If the scene hasn't been saved yet, put them at the root of the Assets folder instead.
var targetName = m_Target.name;
var scene = m_Target.gameObject.scene;
var asset = ProfileFactory.CreatePostProcessProfile(scene, targetName);
m_Profile.objectReferenceValue = asset;
assetHasChanged = true;
}
if (showCopy && GUI.Button(buttonCopyRect, EditorUtilities.GetContent("Clone|Create a new profile and copy the content of the currently assigned profile."), EditorStyles.miniButtonRight))
{
// Duplicate the currently assigned profile and save it as a new profile
var origin = (PostProcessProfile)m_Profile.objectReferenceValue;
var path = AssetDatabase.GetAssetPath(origin);
path = AssetDatabase.GenerateUniqueAssetPath(path);
var asset = Instantiate(origin);
asset.settings.Clear();
AssetDatabase.CreateAsset(asset, path);
foreach (var item in origin.settings)
{
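// Duplicate each settings object, keep it hidden like the original, and attach the
// copy as a sub-asset of the new profile.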
var itemCopy = Instantiate(item);
itemCopy.hideFlags = HideFlags.HideInInspector | HideFlags.HideInHierarchy;
itemCopy.name = item.name;
asset.settings.Add(itemCopy);
AssetDatabase.AddObjectToAsset(itemCopy, asset);
}
AssetDatabase.SaveAssets();
AssetDatabase.Refresh();
m_Profile.objectReferenceValue = asset;
assetHasChanged = true;
}
}
EditorGUILayout.Space();
if (m_Profile.objectReferenceValue == null)
{
if (assetHasChanged)
m_EffectList.Clear(); // Asset wasn't null before, do some cleanup
EditorGUILayout.HelpBox("Assign a Post-process Profile to this volume using the \"Asset\" field or create one automatically by clicking the \"New\" button.\nAssets are automatically put in a folder next to your scene file. If you scene hasn't been saved yet they will be created at the root of the Assets folder.", MessageType.Info);
}
else
{
if (assetHasChanged)
RefreshEffectListEditor((PostProcessProfile)m_Profile.objectReferenceValue);
if (!multiEdit)
m_EffectList.OnGUI();
}
serializedObject.ApplyModifiedProperties();
}
}
}
| 151 |
orrb | openai | C# | using System;
namespace UnityEditor.Rendering.PostProcessing
{
[AttributeUsage(AttributeTargets.Class, AllowMultiple = false)]
public sealed class DecoratorAttribute : Attribute
{
public readonly Type attributeType;
public DecoratorAttribute(Type attributeType)
{
this.attributeType = attributeType;
}
}
}
| 16 |
orrb | openai | C# | using System;
namespace UnityEngine.Rendering.PostProcessing
{
[AttributeUsage(AttributeTargets.Class, AllowMultiple = false)]
public sealed class PostProcessEditorAttribute : Attribute
{
public readonly Type settingsType;
public PostProcessEditorAttribute(Type settingsType)
{
this.settingsType = settingsType;
}
}
}
| 16 |
orrb | openai | C# | using System;
using UnityEngine;
namespace UnityEditor.Rendering.PostProcessing
{
public abstract class AttributeDecorator
{
// Override this and return false if you want to customize the override checkbox position,
// else it'll automatically draw it and put the property content in a horizontal scope.
public virtual bool IsAutoProperty()
{
return true;
}
public abstract bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute);
}
}
| 18 |
orrb | openai | C# | using System;
using UnityEngine;
namespace UnityEditor.Rendering.PostProcessing
{
[Decorator(typeof(RangeAttribute))]
public sealed class RangeDecorator : AttributeDecorator
{
public override bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute)
{
var attr = (RangeAttribute)attribute;
if (property.propertyType == SerializedPropertyType.Float)
{
property.floatValue = EditorGUILayout.Slider(title, property.floatValue, attr.min, attr.max);
return true;
}
if (property.propertyType == SerializedPropertyType.Integer)
{
property.intValue = EditorGUILayout.IntSlider(title, property.intValue, (int)attr.min, (int)attr.max);
return true;
}
return false;
}
}
[Decorator(typeof(UnityEngine.Rendering.PostProcessing.MinAttribute))]
public sealed class MinDecorator : AttributeDecorator
{
public override bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute)
{
var attr = (UnityEngine.Rendering.PostProcessing.MinAttribute)attribute;
if (property.propertyType == SerializedPropertyType.Float)
{
float v = EditorGUILayout.FloatField(title, property.floatValue);
property.floatValue = Mathf.Max(v, attr.min);
return true;
}
if (property.propertyType == SerializedPropertyType.Integer)
{
int v = EditorGUILayout.IntField(title, property.intValue);
property.intValue = Mathf.Max(v, (int)attr.min);
return true;
}
return false;
}
}
[Decorator(typeof(UnityEngine.Rendering.PostProcessing.MaxAttribute))]
public sealed class MaxDecorator : AttributeDecorator
{
public override bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute)
{
var attr = (UnityEngine.Rendering.PostProcessing.MaxAttribute)attribute;
if (property.propertyType == SerializedPropertyType.Float)
{
float v = EditorGUILayout.FloatField(title, property.floatValue);
property.floatValue = Mathf.Min(v, attr.max);
return true;
}
if (property.propertyType == SerializedPropertyType.Integer)
{
int v = EditorGUILayout.IntField(title, property.intValue);
property.intValue = Mathf.Min(v, (int)attr.max);
return true;
}
return false;
}
}
[Decorator(typeof(UnityEngine.Rendering.PostProcessing.MinMaxAttribute))]
public sealed class MinMaxDecorator : AttributeDecorator
{
public override bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute)
{
var attr = (UnityEngine.Rendering.PostProcessing.MinMaxAttribute)attribute;
if (property.propertyType == SerializedPropertyType.Float)
{
float v = EditorGUILayout.FloatField(title, property.floatValue);
property.floatValue = Mathf.Clamp(v, attr.min, attr.max);
return true;
}
if (property.propertyType == SerializedPropertyType.Integer)
{
int v = EditorGUILayout.IntField(title, property.intValue);
property.intValue = Mathf.Clamp(v, (int)attr.min, (int)attr.max);
return true;
}
if (property.propertyType == SerializedPropertyType.Vector2)
{
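// Vector2 values are edited with a min-max range slider: x holds the minimum,
// y holds the maximum.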
var v = property.vector2Value;
EditorGUILayout.MinMaxSlider(title, ref v.x, ref v.y, attr.min, attr.max);
property.vector2Value = v;
return true;
}
return false;
}
}
[Decorator(typeof(ColorUsageAttribute))]
public sealed class ColorUsageDecorator : AttributeDecorator
{
public override bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute)
{
var attr = (ColorUsageAttribute)attribute;
if (property.propertyType != SerializedPropertyType.Color)
return false;
#if UNITY_2018_1_OR_NEWER
property.colorValue = EditorGUILayout.ColorField(title, property.colorValue, true, attr.showAlpha, attr.hdr);
#else
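// Pre-2018.1 the HDR color picker needs an explicit ColorPickerHDRConfig built from
// the attribute's brightness/exposure ranges.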
ColorPickerHDRConfig hdrConfig = null;
if (attr.hdr)
{
hdrConfig = new ColorPickerHDRConfig(
attr.minBrightness,
attr.maxBrightness,
attr.minExposureValue,
attr.maxExposureValue
);
}
property.colorValue = EditorGUILayout.ColorField(title, property.colorValue, true, attr.showAlpha, attr.hdr, hdrConfig);
#endif
return true;
}
}
}
| 144 |
orrb | openai | C# | using System;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[Decorator(typeof(TrackballAttribute))]
public sealed class TrackballDecorator : AttributeDecorator
{
static readonly int k_ThumbHash = "colorWheelThumb".GetHashCode();
static Material s_Material;
bool m_ResetState;
Vector2 m_CursorPos;
public override bool IsAutoProperty()
{
return false;
}
public override bool OnGUI(SerializedProperty property, SerializedProperty overrideState, GUIContent title, Attribute attribute)
{
if (property.propertyType != SerializedPropertyType.Vector4)
return false;
var value = property.vector4Value;
using (new EditorGUILayout.VerticalScope())
{
using (new EditorGUI.DisabledScope(!overrideState.boolValue))
DrawWheel(ref value, overrideState.boolValue, (TrackballAttribute)attribute);
DrawLabelAndOverride(title, overrideState);
}
if (m_ResetState)
{
value = Vector4.zero;
m_ResetState = false;
}
property.vector4Value = value;
return true;
}
void DrawWheel(ref Vector4 value, bool overrideState, TrackballAttribute attr)
{
var wheelRect = GUILayoutUtility.GetAspectRect(1f);
float size = wheelRect.width;
float hsize = size / 2f;
float radius = 0.38f * size;
Vector3 hsv;
Color.RGBToHSV(value, out hsv.x, out hsv.y, out hsv.z);
float offset = value.w;
// Thumb
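// Map the hue to an angle around the wheel (offset by 90 degrees so hue 0 sits at the
// top) and scale by saturation so the thumb moves out from the center.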
var thumbPos = Vector2.zero;
float theta = hsv.x * (Mathf.PI * 2f);
thumbPos.x = Mathf.Cos(theta + (Mathf.PI / 2f));
thumbPos.y = Mathf.Sin(theta - (Mathf.PI / 2f));
thumbPos *= hsv.y * radius;
// Draw the wheel
if (Event.current.type == EventType.Repaint)
{
// Retina support
float scale = EditorGUIUtility.pixelsPerPoint;
if (s_Material == null)
s_Material = new Material(Shader.Find("Hidden/PostProcessing/Editor/Trackball")) { hideFlags = HideFlags.HideAndDontSave };
// Wheel texture
#if UNITY_2018_1_OR_NEWER
const RenderTextureReadWrite kReadWrite = RenderTextureReadWrite.sRGB;
#else
const RenderTextureReadWrite kReadWrite = RenderTextureReadWrite.Linear;
#endif
var oldRT = RenderTexture.active;
var rt = RenderTexture.GetTemporary((int)(size * scale), (int)(size * scale), 0, RenderTextureFormat.ARGB32, kReadWrite);
s_Material.SetFloat("_Offset", offset);
s_Material.SetFloat("_DisabledState", overrideState ? 1f : 0.5f);
s_Material.SetVector("_Resolution", new Vector2(size * scale, size * scale / 2f));
Graphics.Blit(null, rt, s_Material, EditorGUIUtility.isProSkin ? 0 : 1);
RenderTexture.active = oldRT;
GUI.DrawTexture(wheelRect, rt);
RenderTexture.ReleaseTemporary(rt);
var thumbSize = Styling.wheelThumbSize;
var thumbSizeH = thumbSize / 2f;
Styling.wheelThumb.Draw(new Rect(wheelRect.x + hsize + thumbPos.x - thumbSizeH.x, wheelRect.y + hsize + thumbPos.y - thumbSizeH.y, thumbSize.x, thumbSize.y), false, false, false, false);
}
// Input
var bounds = wheelRect;
bounds.x += hsize - radius;
bounds.y += hsize - radius;
bounds.width = bounds.height = radius * 2f;
hsv = GetInput(bounds, hsv, thumbPos, radius);
value = Color.HSVToRGB(hsv.x, hsv.y, 1f);
value.w = offset;
// Offset
var sliderRect = GUILayoutUtility.GetRect(1f, 17f);
float padding = sliderRect.width * 0.05f; // 5% padding
sliderRect.xMin += padding;
sliderRect.xMax -= padding;
value.w = GUI.HorizontalSlider(sliderRect, value.w, -1f, 1f);
if (attr.mode == TrackballAttribute.Mode.None)
return;
// Values
var displayValue = Vector3.zero;
switch (attr.mode)
{
case TrackballAttribute.Mode.Lift: displayValue = ColorUtilities.ColorToLift(value);
break;
case TrackballAttribute.Mode.Gamma: displayValue = ColorUtilities.ColorToInverseGamma(value);
break;
case TrackballAttribute.Mode.Gain: displayValue = ColorUtilities.ColorToGain(value);
break;
}
using (new EditorGUI.DisabledGroupScope(true))
{
var valuesRect = GUILayoutUtility.GetRect(1f, 17f);
valuesRect.width /= 3f;
GUI.Label(valuesRect, displayValue.x.ToString("F2"), EditorStyles.centeredGreyMiniLabel);
valuesRect.x += valuesRect.width;
GUI.Label(valuesRect, displayValue.y.ToString("F2"), EditorStyles.centeredGreyMiniLabel);
valuesRect.x += valuesRect.width;
GUI.Label(valuesRect, displayValue.z.ToString("F2"), EditorStyles.centeredGreyMiniLabel);
valuesRect.x += valuesRect.width;
}
}
void DrawLabelAndOverride(GUIContent title, SerializedProperty overrideState)
{
// Title
var areaRect = GUILayoutUtility.GetRect(1f, 17f);
var labelSize = Styling.wheelLabel.CalcSize(title);
var labelRect = new Rect(areaRect.x + areaRect.width / 2 - labelSize.x / 2, areaRect.y, labelSize.x, labelSize.y);
GUI.Label(labelRect, title, Styling.wheelLabel);
// Override checkbox
var overrideRect = new Rect(labelRect.x - 17, labelRect.y + 3, 17f, 17f);
EditorUtilities.DrawOverrideCheckbox(overrideRect, overrideState);
}
Vector3 GetInput(Rect bounds, Vector3 hsv, Vector2 thumbPos, float radius)
{
var e = Event.current;
var id = GUIUtility.GetControlID(k_ThumbHash, FocusType.Passive, bounds);
var mousePos = e.mousePosition;
if (e.type == EventType.MouseDown && GUIUtility.hotControl == 0 && bounds.Contains(mousePos))
{
if (e.button == 0)
{
var center = new Vector2(bounds.x + radius, bounds.y + radius);
float dist = Vector2.Distance(center, mousePos);
if (dist <= radius)
{
e.Use();
m_CursorPos = new Vector2(thumbPos.x + radius, thumbPos.y + radius);
GUIUtility.hotControl = id;
GUI.changed = true;
}
}
else if (e.button == 1)
{
e.Use();
GUI.changed = true;
m_ResetState = true;
}
}
else if (e.type == EventType.MouseDrag && e.button == 0 && GUIUtility.hotControl == id)
{
e.Use();
GUI.changed = true;
m_CursorPos += e.delta * GlobalSettings.trackballSensitivity;
GetWheelHueSaturation(m_CursorPos.x, m_CursorPos.y, radius, out hsv.x, out hsv.y);
}
else if (e.rawType == EventType.MouseUp && e.button == 0 && GUIUtility.hotControl == id)
{
e.Use();
GUIUtility.hotControl = 0;
}
return hsv;
}
void GetWheelHueSaturation(float x, float y, float radius, out float hue, out float saturation)
{
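// Convert a cursor position relative to the wheel into polar coordinates: the angle
// (wrapped and reversed to match the drawing above) gives the hue, and the distance
// from the center, clamped to [0, 1], gives the saturation.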
float dx = (x - radius) / radius;
float dy = (y - radius) / radius;
float d = Mathf.Sqrt(dx * dx + dy * dy);
hue = Mathf.Atan2(dx, -dy);
hue = 1f - ((hue > 0) ? hue : (Mathf.PI * 2f) + hue) / (Mathf.PI * 2f);
saturation = Mathf.Clamp01(d);
}
}
}
| 210 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(AmbientOcclusion))]
public sealed class AmbientOcclusionEditor : PostProcessEffectEditor<AmbientOcclusion>
{
SerializedParameterOverride m_Mode;
SerializedParameterOverride m_Intensity;
SerializedParameterOverride m_Color;
SerializedParameterOverride m_AmbientOnly;
SerializedParameterOverride m_ThicknessModifier;
SerializedParameterOverride m_DirectLightingStrength;
SerializedParameterOverride m_Quality;
SerializedParameterOverride m_Radius;
public override void OnEnable()
{
m_Mode = FindParameterOverride(x => x.mode);
m_Intensity = FindParameterOverride(x => x.intensity);
m_Color = FindParameterOverride(x => x.color);
m_AmbientOnly = FindParameterOverride(x => x.ambientOnly);
m_ThicknessModifier = FindParameterOverride(x => x.thicknessModifier);
m_DirectLightingStrength = FindParameterOverride(x => x.directLightingStrength);
m_Quality = FindParameterOverride(x => x.quality);
m_Radius = FindParameterOverride(x => x.radius);
}
public override void OnInspectorGUI()
{
PropertyField(m_Mode);
int aoMode = m_Mode.value.intValue;
if (RuntimeUtilities.scriptableRenderPipelineActive && aoMode == (int)AmbientOcclusionMode.ScalableAmbientObscurance)
{
EditorGUILayout.HelpBox("Scalable ambient obscurance doesn't work with scriptable render pipelines.", MessageType.Warning);
return;
}
#if !UNITY_2017_1_OR_NEWER
if (aoMode == (int)AmbientOcclusionMode.MultiScaleVolumetricObscurance)
{
EditorGUILayout.HelpBox("Multi-scale volumetric obscurance requires Unity 2017.1 or more.", MessageType.Warning);
return;
}
#endif
PropertyField(m_Intensity);
if (aoMode == (int)AmbientOcclusionMode.ScalableAmbientObscurance)
{
PropertyField(m_Radius);
PropertyField(m_Quality);
}
else if (aoMode == (int)AmbientOcclusionMode.MultiScaleVolumetricObscurance)
{
if (!SystemInfo.supportsComputeShaders)
EditorGUILayout.HelpBox("Multi-scale volumetric obscurance requires compute shader support.", MessageType.Warning);
PropertyField(m_ThicknessModifier);
if (RuntimeUtilities.scriptableRenderPipelineActive)
PropertyField(m_DirectLightingStrength);
}
PropertyField(m_Color);
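// The ambient-only option is only shown when the main camera uses the deferred
// rendering path with HDR enabled.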
if (Camera.main != null && Camera.main.actualRenderingPath == RenderingPath.DeferredShading && Camera.main.allowHDR)
PropertyField(m_AmbientOnly);
}
}
}
| 74 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(AutoExposure))]
public sealed class AutoExposureEditor : PostProcessEffectEditor<AutoExposure>
{
SerializedParameterOverride m_Filtering;
SerializedParameterOverride m_MinLuminance;
SerializedParameterOverride m_MaxLuminance;
SerializedParameterOverride m_KeyValue;
SerializedParameterOverride m_EyeAdaptation;
SerializedParameterOverride m_SpeedUp;
SerializedParameterOverride m_SpeedDown;
public override void OnEnable()
{
m_Filtering = FindParameterOverride(x => x.filtering);
m_MinLuminance = FindParameterOverride(x => x.minLuminance);
m_MaxLuminance = FindParameterOverride(x => x.maxLuminance);
m_KeyValue = FindParameterOverride(x => x.keyValue);
m_EyeAdaptation = FindParameterOverride(x => x.eyeAdaptation);
m_SpeedUp = FindParameterOverride(x => x.speedUp);
m_SpeedDown = FindParameterOverride(x => x.speedDown);
}
public override void OnInspectorGUI()
{
if (!SystemInfo.supportsComputeShaders)
EditorGUILayout.HelpBox("Auto exposure requires compute shader support.", MessageType.Warning);
EditorUtilities.DrawHeaderLabel("Exposure");
PropertyField(m_Filtering);
PropertyField(m_MinLuminance);
PropertyField(m_MaxLuminance);
// Clamp min/max adaptation values
float minLum = m_MinLuminance.value.floatValue;
float maxLum = m_MaxLuminance.value.floatValue;
m_MinLuminance.value.floatValue = Mathf.Min(minLum, maxLum);
m_MaxLuminance.value.floatValue = Mathf.Max(minLum, maxLum);
PropertyField(m_KeyValue);
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("Adaptation");
PropertyField(m_EyeAdaptation);
if (m_EyeAdaptation.value.intValue == (int)EyeAdaptation.Progressive)
{
PropertyField(m_SpeedUp);
PropertyField(m_SpeedDown);
}
}
}
}
| 64 |
orrb | openai | C# | using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(Bloom))]
public sealed class BloomEditor : PostProcessEffectEditor<Bloom>
{
SerializedParameterOverride m_Intensity;
SerializedParameterOverride m_Threshold;
SerializedParameterOverride m_SoftKnee;
SerializedParameterOverride m_Diffusion;
SerializedParameterOverride m_AnamorphicRatio;
SerializedParameterOverride m_Color;
SerializedParameterOverride m_MobileOptimized;
SerializedParameterOverride m_DirtTexture;
SerializedParameterOverride m_DirtIntensity;
public override void OnEnable()
{
m_Intensity = FindParameterOverride(x => x.intensity);
m_Threshold = FindParameterOverride(x => x.threshold);
m_SoftKnee = FindParameterOverride(x => x.softKnee);
m_Diffusion = FindParameterOverride(x => x.diffusion);
m_AnamorphicRatio = FindParameterOverride(x => x.anamorphicRatio);
m_Color = FindParameterOverride(x => x.color);
m_MobileOptimized = FindParameterOverride(x => x.fastMode);
m_DirtTexture = FindParameterOverride(x => x.dirtTexture);
m_DirtIntensity = FindParameterOverride(x => x.dirtIntensity);
}
public override void OnInspectorGUI()
{
EditorUtilities.DrawHeaderLabel("Bloom");
PropertyField(m_Intensity);
PropertyField(m_Threshold);
PropertyField(m_SoftKnee);
PropertyField(m_Diffusion);
PropertyField(m_AnamorphicRatio);
PropertyField(m_Color);
PropertyField(m_MobileOptimized);
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("Dirtiness");
PropertyField(m_DirtTexture);
PropertyField(m_DirtIntensity);
if (RuntimeUtilities.isVREnabled)
{
if ((m_DirtIntensity.overrideState.boolValue && m_DirtIntensity.value.floatValue > 0f)
|| (m_DirtTexture.overrideState.boolValue && m_DirtTexture.value.objectReferenceValue != null))
EditorGUILayout.HelpBox("Using a dirt texture in VR is not recommended.", MessageType.Warning);
}
}
}
}
| 60 |
orrb | openai | C# | using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(ColorGrading))]
public sealed class ColorGradingEditor : PostProcessEffectEditor<ColorGrading>
{
SerializedParameterOverride m_GradingMode;
static GUIContent[] s_Curves =
{
new GUIContent("Master"),
new GUIContent("Red"),
new GUIContent("Green"),
new GUIContent("Blue"),
new GUIContent("Hue Vs Hue"),
new GUIContent("Hue Vs Sat"),
new GUIContent("Sat Vs Sat"),
new GUIContent("Lum Vs Sat")
};
SerializedParameterOverride m_ExternalLut;
SerializedParameterOverride m_Tonemapper;
SerializedParameterOverride m_ToneCurveToeStrength;
SerializedParameterOverride m_ToneCurveToeLength;
SerializedParameterOverride m_ToneCurveShoulderStrength;
SerializedParameterOverride m_ToneCurveShoulderLength;
SerializedParameterOverride m_ToneCurveShoulderAngle;
SerializedParameterOverride m_ToneCurveGamma;
SerializedParameterOverride m_LdrLut;
SerializedParameterOverride m_Temperature;
SerializedParameterOverride m_Tint;
SerializedParameterOverride m_ColorFilter;
SerializedParameterOverride m_HueShift;
SerializedParameterOverride m_Saturation;
SerializedParameterOverride m_Brightness;
SerializedParameterOverride m_PostExposure;
SerializedParameterOverride m_Contrast;
SerializedParameterOverride m_MixerRedOutRedIn;
SerializedParameterOverride m_MixerRedOutGreenIn;
SerializedParameterOverride m_MixerRedOutBlueIn;
SerializedParameterOverride m_MixerGreenOutRedIn;
SerializedParameterOverride m_MixerGreenOutGreenIn;
SerializedParameterOverride m_MixerGreenOutBlueIn;
SerializedParameterOverride m_MixerBlueOutRedIn;
SerializedParameterOverride m_MixerBlueOutGreenIn;
SerializedParameterOverride m_MixerBlueOutBlueIn;
SerializedParameterOverride m_Lift;
SerializedParameterOverride m_Gamma;
SerializedParameterOverride m_Gain;
SerializedParameterOverride m_MasterCurve;
SerializedParameterOverride m_RedCurve;
SerializedParameterOverride m_GreenCurve;
SerializedParameterOverride m_BlueCurve;
SerializedParameterOverride m_HueVsHueCurve;
SerializedParameterOverride m_HueVsSatCurve;
SerializedParameterOverride m_SatVsSatCurve;
SerializedParameterOverride m_LumVsSatCurve;
// Internal references to the actual animation curves
// Needed for the curve editor
SerializedProperty m_RawMasterCurve;
SerializedProperty m_RawRedCurve;
SerializedProperty m_RawGreenCurve;
SerializedProperty m_RawBlueCurve;
SerializedProperty m_RawHueVsHueCurve;
SerializedProperty m_RawHueVsSatCurve;
SerializedProperty m_RawSatVsSatCurve;
SerializedProperty m_RawLumVsSatCurve;
CurveEditor m_CurveEditor;
Dictionary<SerializedProperty, Color> m_CurveDict;
// Custom tone curve drawing
const int k_CustomToneCurveResolution = 48;
const float k_CustomToneCurveRangeY = 1.025f;
readonly Vector3[] m_RectVertices = new Vector3[4];
readonly Vector3[] m_LineVertices = new Vector3[2];
readonly Vector3[] m_CurveVertices = new Vector3[k_CustomToneCurveResolution];
Rect m_CustomToneCurveRect;
readonly HableCurve m_HableCurve = new HableCurve();
public override void OnEnable()
{
m_GradingMode = FindParameterOverride(x => x.gradingMode);
m_ExternalLut = FindParameterOverride(x => x.externalLut);
m_Tonemapper = FindParameterOverride(x => x.tonemapper);
m_ToneCurveToeStrength = FindParameterOverride(x => x.toneCurveToeStrength);
m_ToneCurveToeLength = FindParameterOverride(x => x.toneCurveToeLength);
m_ToneCurveShoulderStrength = FindParameterOverride(x => x.toneCurveShoulderStrength);
m_ToneCurveShoulderLength = FindParameterOverride(x => x.toneCurveShoulderLength);
m_ToneCurveShoulderAngle = FindParameterOverride(x => x.toneCurveShoulderAngle);
m_ToneCurveGamma = FindParameterOverride(x => x.toneCurveGamma);
m_LdrLut = FindParameterOverride(x => x.ldrLut);
m_Temperature = FindParameterOverride(x => x.temperature);
m_Tint = FindParameterOverride(x => x.tint);
m_ColorFilter = FindParameterOverride(x => x.colorFilter);
m_HueShift = FindParameterOverride(x => x.hueShift);
m_Saturation = FindParameterOverride(x => x.saturation);
m_Brightness = FindParameterOverride(x => x.brightness);
m_PostExposure = FindParameterOverride(x => x.postExposure);
m_Contrast = FindParameterOverride(x => x.contrast);
m_MixerRedOutRedIn = FindParameterOverride(x => x.mixerRedOutRedIn);
m_MixerRedOutGreenIn = FindParameterOverride(x => x.mixerRedOutGreenIn);
m_MixerRedOutBlueIn = FindParameterOverride(x => x.mixerRedOutBlueIn);
m_MixerGreenOutRedIn = FindParameterOverride(x => x.mixerGreenOutRedIn);
m_MixerGreenOutGreenIn = FindParameterOverride(x => x.mixerGreenOutGreenIn);
m_MixerGreenOutBlueIn = FindParameterOverride(x => x.mixerGreenOutBlueIn);
m_MixerBlueOutRedIn = FindParameterOverride(x => x.mixerBlueOutRedIn);
m_MixerBlueOutGreenIn = FindParameterOverride(x => x.mixerBlueOutGreenIn);
m_MixerBlueOutBlueIn = FindParameterOverride(x => x.mixerBlueOutBlueIn);
m_Lift = FindParameterOverride(x => x.lift);
m_Gamma = FindParameterOverride(x => x.gamma);
m_Gain = FindParameterOverride(x => x.gain);
m_MasterCurve = FindParameterOverride(x => x.masterCurve);
m_RedCurve = FindParameterOverride(x => x.redCurve);
m_GreenCurve = FindParameterOverride(x => x.greenCurve);
m_BlueCurve = FindParameterOverride(x => x.blueCurve);
m_HueVsHueCurve = FindParameterOverride(x => x.hueVsHueCurve);
m_HueVsSatCurve = FindParameterOverride(x => x.hueVsSatCurve);
m_SatVsSatCurve = FindParameterOverride(x => x.satVsSatCurve);
m_LumVsSatCurve = FindParameterOverride(x => x.lumVsSatCurve);
m_RawMasterCurve = FindProperty(x => x.masterCurve.value.curve);
m_RawRedCurve = FindProperty(x => x.redCurve.value.curve);
m_RawGreenCurve = FindProperty(x => x.greenCurve.value.curve);
m_RawBlueCurve = FindProperty(x => x.blueCurve.value.curve);
m_RawHueVsHueCurve = FindProperty(x => x.hueVsHueCurve.value.curve);
m_RawHueVsSatCurve = FindProperty(x => x.hueVsSatCurve.value.curve);
m_RawSatVsSatCurve = FindProperty(x => x.satVsSatCurve.value.curve);
m_RawLumVsSatCurve = FindProperty(x => x.lumVsSatCurve.value.curve);
m_CurveEditor = new CurveEditor();
m_CurveDict = new Dictionary<SerializedProperty, Color>();
// Prepare the curve editor
SetupCurve(m_RawMasterCurve, new Color(1f, 1f, 1f), 2, false);
SetupCurve(m_RawRedCurve, new Color(1f, 0f, 0f), 2, false);
SetupCurve(m_RawGreenCurve, new Color(0f, 1f, 0f), 2, false);
SetupCurve(m_RawBlueCurve, new Color(0f, 0.5f, 1f), 2, false);
SetupCurve(m_RawHueVsHueCurve, new Color(1f, 1f, 1f), 0, true);
SetupCurve(m_RawHueVsSatCurve, new Color(1f, 1f, 1f), 0, true);
SetupCurve(m_RawSatVsSatCurve, new Color(1f, 1f, 1f), 0, false);
SetupCurve(m_RawLumVsSatCurve, new Color(1f, 1f, 1f), 0, false);
}
public override void OnInspectorGUI()
{
PropertyField(m_GradingMode);
var gradingMode = (GradingMode)m_GradingMode.value.intValue;
// Check whether we're in gamma or linear color space and display a warning if we're
// trying to do HDR color grading while in gamma mode
if (gradingMode != GradingMode.LowDefinitionRange)
{
if (QualitySettings.activeColorSpace == ColorSpace.Gamma)
EditorGUILayout.HelpBox("ColorSpace in project settings is set to Gamma, HDR color grading won't look correct. Switch to Linear or use LDR color grading mode instead.", MessageType.Warning);
}
if (m_GradingMode.overrideState.boolValue && gradingMode == GradingMode.External)
{
if (!SystemInfo.supports3DRenderTextures || !SystemInfo.supportsComputeShaders)
EditorGUILayout.HelpBox("HDR color grading requires compute shader & 3D render texture support.", MessageType.Warning);
}
if (gradingMode == GradingMode.LowDefinitionRange)
DoStandardModeGUI(false);
else if (gradingMode == GradingMode.HighDefinitionRange)
DoStandardModeGUI(true);
else if (gradingMode == GradingMode.External)
DoExternalModeGUI();
EditorGUILayout.Space();
}
void SetupCurve(SerializedProperty prop, Color color, uint minPointCount, bool loop)
{
var state = CurveEditor.CurveState.defaultState;
state.color = color;
state.visible = false;
state.minPointCount = minPointCount;
state.onlyShowHandlesOnSelection = true;
state.zeroKeyConstantValue = 0.5f;
state.loopInBounds = loop;
m_CurveEditor.Add(prop, state);
m_CurveDict.Add(prop, color);
}
void DoExternalModeGUI()
{
PropertyField(m_ExternalLut);
var lut = m_ExternalLut.value.objectReferenceValue;
if (lut != null)
{
if (lut.GetType() == typeof(Texture3D))
{
var o = (Texture3D)lut;
if (o.width == o.height && o.height == o.depth)
return;
}
else if (lut.GetType() == typeof(RenderTexture))
{
var o = (RenderTexture)lut;
if (o.width == o.height && o.height == o.volumeDepth)
return;
}
EditorGUILayout.HelpBox("Custom LUTs have to be log-encoded 3D textures or 3D render textures with cube format.", MessageType.Warning);
}
}
void DoStandardModeGUI(bool hdr)
{
if (!hdr)
{
PropertyField(m_LdrLut);
var lut = (target as ColorGrading).ldrLut.value;
CheckLutImportSettings(lut);
}
if (hdr)
{
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("Tonemapping");
PropertyField(m_Tonemapper);
if (m_Tonemapper.value.intValue == (int)Tonemapper.Custom)
{
DrawCustomToneCurve();
PropertyField(m_ToneCurveToeStrength);
PropertyField(m_ToneCurveToeLength);
PropertyField(m_ToneCurveShoulderStrength);
PropertyField(m_ToneCurveShoulderLength);
PropertyField(m_ToneCurveShoulderAngle);
PropertyField(m_ToneCurveGamma);
}
}
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("White Balance");
PropertyField(m_Temperature);
PropertyField(m_Tint);
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("Tone");
if (hdr)
PropertyField(m_PostExposure);
PropertyField(m_ColorFilter);
PropertyField(m_HueShift);
PropertyField(m_Saturation);
if (!hdr)
PropertyField(m_Brightness);
PropertyField(m_Contrast);
EditorGUILayout.Space();
int currentChannel = GlobalSettings.currentChannelMixer;
using (new EditorGUILayout.HorizontalScope())
{
EditorGUILayout.PrefixLabel("Channel Mixer", GUIStyle.none, Styling.labelHeader);
EditorGUI.BeginChangeCheck();
{
using (new EditorGUILayout.HorizontalScope())
{
GUILayoutUtility.GetRect(9f, 18f, GUILayout.ExpandWidth(false)); // Dirty hack to do proper right column alignment
if (GUILayout.Toggle(currentChannel == 0, EditorUtilities.GetContent("Red|Red output channel."), EditorStyles.miniButtonLeft)) currentChannel = 0;
if (GUILayout.Toggle(currentChannel == 1, EditorUtilities.GetContent("Green|Green output channel."), EditorStyles.miniButtonMid)) currentChannel = 1;
if (GUILayout.Toggle(currentChannel == 2, EditorUtilities.GetContent("Blue|Blue output channel."), EditorStyles.miniButtonRight)) currentChannel = 2;
}
}
if (EditorGUI.EndChangeCheck())
GUI.FocusControl(null);
}
GlobalSettings.currentChannelMixer = currentChannel;
if (currentChannel == 0)
{
PropertyField(m_MixerRedOutRedIn);
PropertyField(m_MixerRedOutGreenIn);
PropertyField(m_MixerRedOutBlueIn);
}
else if (currentChannel == 1)
{
PropertyField(m_MixerGreenOutRedIn);
PropertyField(m_MixerGreenOutGreenIn);
PropertyField(m_MixerGreenOutBlueIn);
}
else
{
PropertyField(m_MixerBlueOutRedIn);
PropertyField(m_MixerBlueOutGreenIn);
PropertyField(m_MixerBlueOutBlueIn);
}
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("Trackballs");
using (new EditorGUILayout.HorizontalScope())
{
PropertyField(m_Lift);
GUILayout.Space(4f);
PropertyField(m_Gamma);
GUILayout.Space(4f);
PropertyField(m_Gain);
}
EditorGUILayout.Space();
EditorUtilities.DrawHeaderLabel("Grading Curves");
DoCurvesGUI(hdr);
}
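// Verifies that a LUT uses the expected import settings (no aniso, no mips, linear,
// uncompressed, clamped) and offers a one-click fix when it doesn't.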
void CheckLutImportSettings(Texture lut)
{
if (lut != null)
{
var importer = AssetImporter.GetAtPath(AssetDatabase.GetAssetPath(lut)) as TextureImporter;
// The importer is null for built-in textures, as import settings can't be changed
// on built-in resources, hence the null check
if (importer != null)
{
bool valid = importer.anisoLevel == 0
&& importer.mipmapEnabled == false
&& importer.sRGBTexture == false
&& importer.textureCompression == TextureImporterCompression.Uncompressed
&& importer.wrapMode == TextureWrapMode.Clamp;
if (!valid)
EditorUtilities.DrawFixMeBox("Invalid LUT import settings.", () => SetLutImportSettings(importer));
}
}
}
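// Applies the import settings expected for a grading LUT and reimports the asset.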
void SetLutImportSettings(TextureImporter importer)
{
importer.textureType = TextureImporterType.Default;
importer.mipmapEnabled = false;
importer.anisoLevel = 0;
importer.sRGBTexture = false;
importer.npotScale = TextureImporterNPOTScale.None;
importer.textureCompression = TextureImporterCompression.Uncompressed;
importer.alphaSource = TextureImporterAlphaSource.None;
importer.wrapMode = TextureWrapMode.Clamp;
importer.SaveAndReimport();
AssetDatabase.Refresh();
}
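// Draws a read-only preview of the custom (Hable) tone curve from the current
// toe, shoulder and gamma parameter values.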
void DrawCustomToneCurve()
{
EditorGUILayout.Space();
// Reserve GUI space
using (new GUILayout.HorizontalScope())
{
GUILayout.Space(EditorGUI.indentLevel * 15f);
m_CustomToneCurveRect = GUILayoutUtility.GetRect(128, 80);
}
if (Event.current.type != EventType.Repaint)
return;
// Prepare curve data
float toeStrength = m_ToneCurveToeStrength.value.floatValue;
float toeLength = m_ToneCurveToeLength.value.floatValue;
float shoulderStrength = m_ToneCurveShoulderStrength.value.floatValue;
float shoulderLength = m_ToneCurveShoulderLength.value.floatValue;
float shoulderAngle = m_ToneCurveShoulderAngle.value.floatValue;
float gamma = m_ToneCurveGamma.value.floatValue;
m_HableCurve.Init(
toeStrength,
toeLength,
shoulderStrength,
shoulderLength,
shoulderAngle,
gamma
);
float endPoint = m_HableCurve.whitePoint;
// Background
m_RectVertices[0] = PointInRect(0f, 0f, endPoint);
m_RectVertices[1] = PointInRect(endPoint, 0f, endPoint);
m_RectVertices[2] = PointInRect(endPoint, k_CustomToneCurveRangeY, endPoint);
m_RectVertices[3] = PointInRect(0f, k_CustomToneCurveRangeY, endPoint);
Handles.DrawSolidRectangleWithOutline(m_RectVertices, Color.white * 0.1f, Color.white * 0.4f);
// Vertical guides
if (endPoint < m_CustomToneCurveRect.width / 3)
{
int steps = Mathf.CeilToInt(endPoint);
for (var i = 1; i < steps; i++)
DrawLine(i, 0, i, k_CustomToneCurveRangeY, 0.4f, endPoint);
}
// Label
Handles.Label(m_CustomToneCurveRect.position + Vector2.right, "Custom Tone Curve", EditorStyles.miniLabel);
// Draw the actual curve
var vcount = 0;
while (vcount < k_CustomToneCurveResolution)
{
float x = endPoint * vcount / (k_CustomToneCurveResolution - 1);
float y = m_HableCurve.Eval(x);
if (y < k_CustomToneCurveRangeY)
{
m_CurveVertices[vcount++] = PointInRect(x, y, endPoint);
}
else
{
if (vcount > 1)
{
// Extend the last segment to the top edge of the rect.
var v1 = m_CurveVertices[vcount - 2];
var v2 = m_CurveVertices[vcount - 1];
var clip = (m_CustomToneCurveRect.y - v1.y) / (v2.y - v1.y);
m_CurveVertices[vcount - 1] = v1 + (v2 - v1) * clip;
}
break;
}
}
if (vcount > 1)
{
Handles.color = Color.white * 0.9f;
Handles.DrawAAPolyLine(2f, vcount, m_CurveVertices);
}
}
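// Draws a single grayscale guide line inside the tone curve preview rect.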
void DrawLine(float x1, float y1, float x2, float y2, float grayscale, float rangeX)
{
m_LineVertices[0] = PointInRect(x1, y1, rangeX);
m_LineVertices[1] = PointInRect(x2, y2, rangeX);
Handles.color = Color.white * grayscale;
Handles.DrawAAPolyLine(2f, m_LineVertices);
}
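// Maps a curve-space point into the preview rect; x is normalized by rangeX,
// y by the fixed k_CustomToneCurveRangeY.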
Vector3 PointInRect(float x, float y, float rangeX)
{
x = Mathf.Lerp(m_CustomToneCurveRect.x, m_CustomToneCurveRect.xMax, x / rangeX);
y = Mathf.Lerp(m_CustomToneCurveRect.yMax, m_CustomToneCurveRect.y, y / k_CustomToneCurveRangeY);
return new Vector3(x, y, 0);
}
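// Hides every registered grading curve; the currently edited one is re-shown afterwards.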
void ResetVisibleCurves()
{
foreach (var curve in m_CurveDict)
{
var state = m_CurveEditor.GetCurveState(curve.Key);
state.visible = false;
m_CurveEditor.SetCurveState(curve.Key, state);
}
}
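// Shows the given curve; it is only editable while its override toggle is on.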
void SetCurveVisible(SerializedProperty rawProp, SerializedProperty overrideProp)
{
var state = m_CurveEditor.GetCurveState(rawProp);
state.visible = true;
state.editable = overrideProp.boolValue;
m_CurveEditor.SetCurveState(rawProp, state);
}
void CurveOverrideToggle(SerializedProperty overrideProp)
{
overrideProp.boolValue = GUILayout.Toggle(overrideProp.boolValue, EditorUtilities.GetContent("Override"), EditorStyles.toolbarButton);
}
static Material s_MaterialGrid;
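// Grading curves UI: a toolbar with curve selection, override toggle and reset button,
// followed by the curve editing area with its grid and gradient backgrounds.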
void DoCurvesGUI(bool hdr)
{
EditorGUILayout.Space();
ResetVisibleCurves();
using (new EditorGUI.DisabledGroupScope(serializedObject.isEditingMultipleObjects))
{
int curveEditingId = 0;
SerializedProperty currentCurveRawProp = null;
// Top toolbar
using (new GUILayout.HorizontalScope(EditorStyles.toolbar))
{
curveEditingId = DoCurveSelectionPopup(GlobalSettings.currentCurve, hdr);
curveEditingId = Mathf.Clamp(curveEditingId, hdr ? 4 : 0, 7);
EditorGUILayout.Space();
switch (curveEditingId)
{
case 0:
CurveOverrideToggle(m_MasterCurve.overrideState);
SetCurveVisible(m_RawMasterCurve, m_MasterCurve.overrideState);
currentCurveRawProp = m_RawMasterCurve;
break;
case 1:
CurveOverrideToggle(m_RedCurve.overrideState);
SetCurveVisible(m_RawRedCurve, m_RedCurve.overrideState);
currentCurveRawProp = m_RawRedCurve;
break;
case 2:
CurveOverrideToggle(m_GreenCurve.overrideState);
SetCurveVisible(m_RawGreenCurve, m_GreenCurve.overrideState);
currentCurveRawProp = m_RawGreenCurve;
break;
case 3:
CurveOverrideToggle(m_BlueCurve.overrideState);
SetCurveVisible(m_RawBlueCurve, m_BlueCurve.overrideState);
currentCurveRawProp = m_RawBlueCurve;
break;
case 4:
CurveOverrideToggle(m_HueVsHueCurve.overrideState);
SetCurveVisible(m_RawHueVsHueCurve, m_HueVsHueCurve.overrideState);
currentCurveRawProp = m_RawHueVsHueCurve;
break;
case 5:
CurveOverrideToggle(m_HueVsSatCurve.overrideState);
SetCurveVisible(m_RawHueVsSatCurve, m_HueVsSatCurve.overrideState);
currentCurveRawProp = m_RawHueVsSatCurve;
break;
case 6:
CurveOverrideToggle(m_SatVsSatCurve.overrideState);
SetCurveVisible(m_RawSatVsSatCurve, m_SatVsSatCurve.overrideState);
currentCurveRawProp = m_RawSatVsSatCurve;
break;
case 7:
CurveOverrideToggle(m_LumVsSatCurve.overrideState);
SetCurveVisible(m_RawLumVsSatCurve, m_LumVsSatCurve.overrideState);
currentCurveRawProp = m_RawLumVsSatCurve;
break;
}
GUILayout.FlexibleSpace();
if (GUILayout.Button("Reset", EditorStyles.toolbarButton))
{
switch (curveEditingId)
{
case 0: m_RawMasterCurve.animationCurveValue = AnimationCurve.Linear(0f, 0f, 1f, 1f);
break;
case 1: m_RawRedCurve.animationCurveValue = AnimationCurve.Linear(0f, 0f, 1f, 1f);
break;
case 2: m_RawGreenCurve.animationCurveValue = AnimationCurve.Linear(0f, 0f, 1f, 1f);
break;
case 3: m_RawBlueCurve.animationCurveValue = AnimationCurve.Linear(0f, 0f, 1f, 1f);
break;
case 4: m_RawHueVsHueCurve.animationCurveValue = new AnimationCurve();
break;
case 5: m_RawHueVsSatCurve.animationCurveValue = new AnimationCurve();
break;
case 6: m_RawSatVsSatCurve.animationCurveValue = new AnimationCurve();
break;
case 7: m_RawLumVsSatCurve.animationCurveValue = new AnimationCurve();
break;
}
}
GlobalSettings.currentCurve = curveEditingId;
}
// Curve area
var settings = m_CurveEditor.settings;
var rect = GUILayoutUtility.GetAspectRect(2f);
var innerRect = settings.padding.Remove(rect);
if (Event.current.type == EventType.Repaint)
{
// Background
EditorGUI.DrawRect(rect, new Color(0.15f, 0.15f, 0.15f, 1f));
if (curveEditingId == 4 || curveEditingId == 5)
DrawBackgroundTexture(innerRect, 0);
else if (curveEditingId == 6 || curveEditingId == 7)
DrawBackgroundTexture(innerRect, 1);
// Bounds
Handles.color = Color.white * (GUI.enabled ? 1f : 0.5f);
Handles.DrawSolidRectangleWithOutline(innerRect, Color.clear, new Color(0.8f, 0.8f, 0.8f, 0.5f));
// Grid setup
Handles.color = new Color(1f, 1f, 1f, 0.05f);
int hLines = (int)Mathf.Sqrt(innerRect.width);
int vLines = (int)(hLines / (innerRect.width / innerRect.height));
// Vertical grid
int gridOffset = Mathf.FloorToInt(innerRect.width / hLines);
int gridPadding = ((int)(innerRect.width) % hLines) / 2;
for (int i = 1; i < hLines; i++)
{
var offset = i * Vector2.right * gridOffset;
offset.x += gridPadding;
Handles.DrawLine(innerRect.position + offset, new Vector2(innerRect.x, innerRect.yMax - 1) + offset);
}
// Horizontal grid
gridOffset = Mathf.FloorToInt(innerRect.height / vLines);
gridPadding = ((int)(innerRect.height) % vLines) / 2;
for (int i = 1; i < vLines; i++)
{
var offset = i * Vector2.up * gridOffset;
offset.y += gridPadding;
Handles.DrawLine(innerRect.position + offset, new Vector2(innerRect.xMax - 1, innerRect.y) + offset);
}
}
// Curve editor
if (m_CurveEditor.OnGUI(rect))
{
Repaint();
GUI.changed = true;
}
if (Event.current.type == EventType.Repaint)
{
// Borders
Handles.color = Color.black;
Handles.DrawLine(new Vector2(rect.x, rect.y - 18f), new Vector2(rect.xMax, rect.y - 18f));
Handles.DrawLine(new Vector2(rect.x, rect.y - 19f), new Vector2(rect.x, rect.yMax));
Handles.DrawLine(new Vector2(rect.x, rect.yMax), new Vector2(rect.xMax, rect.yMax));
Handles.DrawLine(new Vector2(rect.xMax, rect.yMax), new Vector2(rect.xMax, rect.y - 18f));
bool editable = m_CurveEditor.GetCurveState(currentCurveRawProp).editable;
string editableString = editable ? string.Empty : "(Not Overriding)\n";
// Selection info
var selection = m_CurveEditor.GetSelection();
var infoRect = innerRect;
infoRect.x += 5f;
infoRect.width = 100f;
infoRect.height = 30f;
if (selection.curve != null && selection.keyframeIndex > -1)
{
var key = selection.keyframe.Value;
GUI.Label(infoRect, string.Format("{0}\n{1}", key.time.ToString("F3"), key.value.ToString("F3")), Styling.preLabel);
}
else
{
GUI.Label(infoRect, editableString, Styling.preLabel);
}
}
}
}
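// Blits the hue/saturation grid material into a temporary render texture and draws it
// behind the curve area (pass 0 for hue-based curves, pass 1 for sat/lum curves).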
void DrawBackgroundTexture(Rect rect, int pass)
{
if (s_MaterialGrid == null)
s_MaterialGrid = new Material(Shader.Find("Hidden/PostProcessing/Editor/CurveGrid")) { hideFlags = HideFlags.HideAndDontSave };
float scale = EditorGUIUtility.pixelsPerPoint;
#if UNITY_2018_1_OR_NEWER
const RenderTextureReadWrite kReadWrite = RenderTextureReadWrite.sRGB;
#else
const RenderTextureReadWrite kReadWrite = RenderTextureReadWrite.Linear;
#endif
var oldRt = RenderTexture.active;
var rt = RenderTexture.GetTemporary(Mathf.CeilToInt(rect.width * scale), Mathf.CeilToInt(rect.height * scale), 0, RenderTextureFormat.ARGB32, kReadWrite);
s_MaterialGrid.SetFloat("_DisabledState", GUI.enabled ? 1f : 0.5f);
s_MaterialGrid.SetFloat("_PixelScaling", EditorGUIUtility.pixelsPerPoint);
Graphics.Blit(null, rt, s_MaterialGrid, pass);
RenderTexture.active = oldRt;
GUI.DrawTexture(rect, rt);
RenderTexture.ReleaseTemporary(rt);
}
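// Draws the curve selection dropdown; the first four curves (Master/Red/Green/Blue) are
// disabled while grading in HDR. Selection changes are applied via GlobalSettings.currentCurve.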
int DoCurveSelectionPopup(int id, bool hdr)
{
GUILayout.Label(s_Curves[id], EditorStyles.toolbarPopup, GUILayout.MaxWidth(150f));
var lastRect = GUILayoutUtility.GetLastRect();
var e = Event.current;
if (e.type == EventType.MouseDown && e.button == 0 && lastRect.Contains(e.mousePosition))
{
var menu = new GenericMenu();
for (int i = 0; i < s_Curves.Length; i++)
{
if (i == 4)
menu.AddSeparator("");
if (hdr && i < 4)
menu.AddDisabledItem(s_Curves[i]);
else
{
int current = i; // Capture local for closure
menu.AddItem(s_Curves[i], current == id, () => GlobalSettings.currentCurve = current);
}
}
menu.DropDown(new Rect(lastRect.xMin, lastRect.yMax, 1f, 1f));
}
return id;
}
}
}
| 739 |
orrb | openai | C# | using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
public class DefaultPostProcessEffectEditor : PostProcessEffectBaseEditor
{
List<SerializedParameterOverride> m_Parameters;
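// Collects, via reflection, every serialized ParameterOverride field declared on the
// effect (except 'enabled') and wraps each one for display in the inspector.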
public override void OnEnable()
{
m_Parameters = new List<SerializedParameterOverride>();
var fields = target.GetType()
.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)
.Where(t => t.FieldType.IsSubclassOf(typeof(ParameterOverride)) && t.Name != "enabled")
.Where(t =>
(t.IsPublic && t.GetCustomAttributes(typeof(NonSerializedAttribute), false).Length == 0)
|| (t.GetCustomAttributes(typeof(UnityEngine.SerializeField), false).Length > 0)
)
.ToList();
foreach (var field in fields)
{
var property = serializedObject.FindProperty(field.Name);
var attributes = field.GetCustomAttributes(false).Cast<Attribute>().ToArray();
var parameter = new SerializedParameterOverride(property, attributes);
m_Parameters.Add(parameter);
}
}
public override void OnInspectorGUI()
{
foreach (var parameter in m_Parameters)
PropertyField(parameter);
}
}
}
| 42 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(DepthOfField))]
public sealed class DepthOfFieldEditor : PostProcessEffectEditor<DepthOfField>
{
SerializedParameterOverride m_FocusDistance;
SerializedParameterOverride m_Aperture;
SerializedParameterOverride m_FocalLength;
SerializedParameterOverride m_KernelSize;
public override void OnEnable()
{
m_FocusDistance = FindParameterOverride(x => x.focusDistance);
m_Aperture = FindParameterOverride(x => x.aperture);
m_FocalLength = FindParameterOverride(x => x.focalLength);
m_KernelSize = FindParameterOverride(x => x.kernelSize);
}
public override void OnInspectorGUI()
{
if (SystemInfo.graphicsShaderLevel < 35)
EditorGUILayout.HelpBox("Depth Of Field is only supported on the following platforms:\nDX11+, OpenGL 3.2+, OpenGL ES 3+, Metal, Vulkan, PS4/XB1 consoles.", MessageType.Warning);
PropertyField(m_FocusDistance);
PropertyField(m_Aperture);
PropertyField(m_FocalLength);
PropertyField(m_KernelSize);
}
}
}
| 34 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(ScreenSpaceReflections))]
public sealed class ScreenSpaceReflectionsEditor : PostProcessEffectEditor<ScreenSpaceReflections>
{
SerializedParameterOverride m_Preset;
SerializedParameterOverride m_MaximumIterationCount;
SerializedParameterOverride m_Thickness;
SerializedParameterOverride m_Resolution;
SerializedParameterOverride m_MaximumMarchDistance;
SerializedParameterOverride m_DistanceFade;
SerializedParameterOverride m_Vignette;
public override void OnEnable()
{
m_Preset = FindParameterOverride(x => x.preset);
m_MaximumIterationCount = FindParameterOverride(x => x.maximumIterationCount);
m_Thickness = FindParameterOverride(x => x.thickness);
m_Resolution = FindParameterOverride(x => x.resolution);
m_MaximumMarchDistance = FindParameterOverride(x => x.maximumMarchDistance);
m_DistanceFade = FindParameterOverride(x => x.distanceFade);
m_Vignette = FindParameterOverride(x => x.vignette);
}
public override void OnInspectorGUI()
{
if (RuntimeUtilities.scriptableRenderPipelineActive)
{
EditorGUILayout.HelpBox("This effect doesn't work with scriptable render pipelines yet.", MessageType.Warning);
return;
}
if (Camera.main != null && Camera.main.actualRenderingPath != RenderingPath.DeferredShading)
EditorGUILayout.HelpBox("This effect only works with the deferred rendering path.", MessageType.Warning);
if (!SystemInfo.supportsComputeShaders)
EditorGUILayout.HelpBox("This effect requires compute shader support.", MessageType.Warning);
PropertyField(m_Preset);
if (m_Preset.value.intValue == (int)ScreenSpaceReflectionPreset.Custom)
{
PropertyField(m_MaximumIterationCount);
PropertyField(m_Thickness);
PropertyField(m_Resolution);
EditorGUILayout.Space();
}
PropertyField(m_MaximumMarchDistance);
PropertyField(m_DistanceFade);
PropertyField(m_Vignette);
}
}
}
| 59 |
orrb | openai | C# | using UnityEngine;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEditor.Rendering.PostProcessing
{
[PostProcessEditor(typeof(Vignette))]
public sealed class VignetteEditor : PostProcessEffectEditor<Vignette>
{
SerializedParameterOverride m_Mode;
SerializedParameterOverride m_Color;
SerializedParameterOverride m_Center;
SerializedParameterOverride m_Intensity;
SerializedParameterOverride m_Smoothness;
SerializedParameterOverride m_Roundness;
SerializedParameterOverride m_Rounded;
SerializedParameterOverride m_Mask;
SerializedParameterOverride m_Opacity;
public override void OnEnable()
{
m_Mode = FindParameterOverride(x => x.mode);
m_Color = FindParameterOverride(x => x.color);
m_Center = FindParameterOverride(x => x.center);
m_Intensity = FindParameterOverride(x => x.intensity);
m_Smoothness = FindParameterOverride(x => x.smoothness);
m_Roundness = FindParameterOverride(x => x.roundness);
m_Rounded = FindParameterOverride(x => x.rounded);
m_Mask = FindParameterOverride(x => x.mask);
m_Opacity = FindParameterOverride(x => x.opacity);
}
public override void OnInspectorGUI()
{
PropertyField(m_Mode);
PropertyField(m_Color);
if (m_Mode.value.intValue == (int)VignetteMode.Classic)
{
PropertyField(m_Center);
PropertyField(m_Intensity);
PropertyField(m_Smoothness);
PropertyField(m_Roundness);
PropertyField(m_Rounded);
}
else
{
PropertyField(m_Mask);
var mask = (target as Vignette).mask.value;
// Checks import settings on the mask
if (mask != null)
{
var importer = AssetImporter.GetAtPath(AssetDatabase.GetAssetPath(mask)) as TextureImporter;
// The importer is null for built-in textures, as import settings can't be changed
// on built-in resources, hence the null check
if (importer != null)
{
bool valid = importer.anisoLevel == 0
&& importer.mipmapEnabled == false
&& importer.alphaSource == TextureImporterAlphaSource.FromGrayScale
&& importer.textureCompression == TextureImporterCompression.Uncompressed
&& importer.wrapMode == TextureWrapMode.Clamp;
if (!valid)
EditorUtilities.DrawFixMeBox("Invalid mask import settings.", () => SetMaskImportSettings(importer));
}
}
PropertyField(m_Opacity);
}
}
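// Forces the import settings expected for a vignette mask (single channel from grayscale,
// uncompressed, no mips, clamped) and reimports the asset.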
void SetMaskImportSettings(TextureImporter importer)
{
importer.textureType = TextureImporterType.SingleChannel;
importer.alphaSource = TextureImporterAlphaSource.FromGrayScale;
importer.textureCompression = TextureImporterCompression.Uncompressed;
importer.anisoLevel = 0;
importer.mipmapEnabled = false;
importer.wrapMode = TextureWrapMode.Clamp;
importer.SaveAndReimport();
AssetDatabase.Refresh();
}
}
}
| 92 |