Skip to content

ISensor interface and use for visual observations #2731

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 38 commits into from
Oct 22, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
cc77ac2
ISensor and SensorBase
Oct 11, 2019
96c2569
camera and rendertex first pass
Oct 12, 2019
3c6105a
use isensors for visual obs
Oct 14, 2019
f79aca8
Update gridworld with CameraSensors
Oct 14, 2019
d3ca506
compressed obs for reals
Oct 14, 2019
8cfdd04
Remove AgentInfo.visualObservations
Oct 15, 2019
1925a14
Merge remote-tracking branch 'origin/develop' into develop-sensor-int…
Oct 15, 2019
fd9c3fc
better separation of train and inference sensor calls
Oct 15, 2019
523a6da
compressed obs proto - need CI to generate code
Oct 15, 2019
2850d75
int32
Oct 15, 2019
aee1b06
get proto name right
Oct 15, 2019
7741db5
run protoc locally for new files
Oct 16, 2019
c619ac1
apply generated proto patch (pyi files were weird)
Oct 16, 2019
3f5ea37
don't repeat bytes
Oct 16, 2019
bdff9f7
hook up compressedobs
Oct 16, 2019
7b9c288
dont send BrainParameters until there's an AgentInfo
Oct 17, 2019
80b5520
python BrainParameters now needs an AgentInfo to create
Oct 17, 2019
539c481
remove last (I hope) dependency on camerares
Oct 18, 2019
f2bfa6f
remove CameraResolutions and AgentInfo.visual_observations
Oct 18, 2019
d32c06e
update mypy-protobuf version
Oct 18, 2019
21af53d
cleanup todos
Oct 18, 2019
2642edc
python cleanup
Oct 18, 2019
a85bea3
more unit test fixes
Oct 18, 2019
ab844b4
more unit test fix
Oct 18, 2019
ad5ed81
camera sensors for VisualFood collector, record demo
Oct 18, 2019
ae9e20f
SensorComponent
Oct 18, 2019
506a48b
Merge remote-tracking branch 'origin/develop' into develop-sensor-int…
Oct 18, 2019
1b18813
timers, rename Sensor fields, GetName interface
Oct 21, 2019
8e19577
sort sensors, add test
Oct 21, 2019
a2be183
remove empty test
Oct 21, 2019
44533a5
revert ProjectVersion.txt change
Oct 21, 2019
e0937d6
fix demo recording
Oct 21, 2019
0f1c949
remove AgentParameters cameras and textures
Oct 21, 2019
5285691
PR feedback, use RenderTexture sensor in GridWorld
Oct 21, 2019
c64ea28
update scenes to use Camera sensors
Oct 21, 2019
2b8a409
Add comments on future of GenerateSensorData
Oct 21, 2019
7cf5332
make SensorComponent and subclasses public
Oct 22, 2019
15c04ac
#if DEBUG around checks
Oct 22, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 0 additions & 51 deletions UnitySDK/Assets/ML-Agents/Editor/AgentEditor.cs
Original file line number Diff line number Diff line change
Expand Up @@ -25,60 +25,9 @@ public override void OnInspectorGUI()
"agentParameters.resetOnDone");
var isOdd = serializedAgent.FindProperty(
"agentParameters.onDemandDecision");
var cameras = serializedAgent.FindProperty(
"agentParameters.agentCameras");
var renderTextures = serializedAgent.FindProperty(
"agentParameters.agentRenderTextures");

EditorGUILayout.PropertyField(brain);

if (cameras.arraySize > 0 && renderTextures.arraySize > 0)
{
EditorGUILayout.HelpBox("Brain visual observations created by first getting all cameras then all render textures.", MessageType.Info);
}

EditorGUILayout.LabelField("Agent Cameras");
for (var i = 0; i < cameras.arraySize; i++)
{
EditorGUILayout.PropertyField(
cameras.GetArrayElementAtIndex(i),
new GUIContent("Camera " + (i + 1) + ": "));
}

EditorGUILayout.BeginHorizontal();
if (GUILayout.Button("Add Camera", EditorStyles.miniButton))
{
cameras.arraySize++;
}

if (GUILayout.Button("Remove Camera", EditorStyles.miniButton))
{
cameras.arraySize--;
}

EditorGUILayout.EndHorizontal();

EditorGUILayout.LabelField("Agent RenderTextures");
for (var i = 0; i < renderTextures.arraySize; i++)
{
EditorGUILayout.PropertyField(
renderTextures.GetArrayElementAtIndex(i),
new GUIContent("RenderTexture " + (i + 1) + ": "));
}

EditorGUILayout.BeginHorizontal();
if (GUILayout.Button("Add RenderTextures", EditorStyles.miniButton))
{
renderTextures.arraySize++;
}

if (GUILayout.Button("Remove RenderTextures", EditorStyles.miniButton))
{
renderTextures.arraySize--;
}

EditorGUILayout.EndHorizontal();


EditorGUILayout.PropertyField(
maxSteps,
Expand Down
113 changes: 0 additions & 113 deletions UnitySDK/Assets/ML-Agents/Editor/BrainParametersDrawer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,11 @@ public class BrainParametersDrawer : PropertyDrawer
// The height of a line in the Unity Inspectors
private const float k_LineHeight = 17f;
private const int k_VecObsNumLine = 3;
private const string k_CamResPropName = "cameraResolutions";
private const string k_ActionSizePropName = "vectorActionSize";
private const string k_ActionTypePropName = "vectorActionSpaceType";
private const string k_ActionDescriptionPropName = "vectorActionDescriptions";
private const string k_VecObsPropName = "vectorObservationSize";
private const string k_NumVecObsPropName = "numStackedVectorObservations";
private const string k_CamWidthPropName = "width";
private const string k_CamHeightPropName = "height";
private const string k_CamGrayPropName = "blackAndWhite";
private const int k_DefaultCameraWidth = 84;
private const int k_DefaultCameraHeight = 84;
private const bool k_DefaultCameraGray = false;

/// <inheritdoc />
public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
Expand All @@ -33,7 +26,6 @@ public override float GetPropertyHeight(SerializedProperty property, GUIContent
{
return k_LineHeight +
GetHeightDrawVectorObservation() +
GetHeightDrawVisualObservation(property) +
GetHeightDrawVectorAction(property) +
GetHeightDrawVectorActionDescriptions(property);
}
Expand All @@ -57,10 +49,6 @@ public override void OnGUI(Rect position, SerializedProperty property, GUIConten
DrawVectorObservation(position, property);
position.y += GetHeightDrawVectorObservation();

//Visual Observations
DrawVisualObservations(position, property);
position.y += GetHeightDrawVisualObservation(property);

// Vector Action
DrawVectorAction(position, property);
position.y += GetHeightDrawVectorAction(property);
Expand Down Expand Up @@ -111,107 +99,6 @@ private static float GetHeightDrawVectorObservation()
return k_VecObsNumLine * k_LineHeight;
}

/// <summary>
/// Draws the Visual Observations parameters for the Brain Parameters
/// </summary>
/// <param name="position">Rectangle on the screen to use for the property GUI.</param>
/// <param name="property">The SerializedProperty of the BrainParameters
/// to make the custom GUI for.</param>
private static void DrawVisualObservations(Rect position, SerializedProperty property)
{
    EditorGUI.LabelField(position, "Visual Observations");
    position.y += k_LineHeight;
    var quarter = position.width / 4;
    var resolutions = property.FindPropertyRelative(k_CamResPropName);
    DrawVisualObsButtons(position, resolutions);
    position.y += k_LineHeight;

    // Column layout: Index, Width, Height and Gray each take a quarter of the width.
    var indexRect = new Rect(position.x, position.y, quarter, position.height);
    var widthRect = new Rect(position.x + quarter, position.y, quarter, position.height);
    var heightRect = new Rect(position.x + 2 * quarter, position.y, quarter, position.height);
    var bwRect = new Rect(position.x + 3 * quarter, position.y, quarter, position.height);
    EditorGUI.indentLevel++;
    if (resolutions.arraySize > 0)
    {
        EditorGUI.LabelField(indexRect, "Index");
        indexRect.y += k_LineHeight;
        EditorGUI.LabelField(widthRect, "Width");
        widthRect.y += k_LineHeight;
        EditorGUI.LabelField(heightRect, "Height");
        heightRect.y += k_LineHeight;
        EditorGUI.LabelField(bwRect, "Gray");
        bwRect.y += k_LineHeight;
    }

    // One row of editable fields per camera resolution.
    for (var i = 0; i < resolutions.arraySize; i++)
    {
        EditorGUI.LabelField(indexRect, "Obs " + i);
        indexRect.y += k_LineHeight;
        var res = resolutions.GetArrayElementAtIndex(i);
        // Use the shared property-name constants so this stays in sync with
        // DrawVisualObsButtons, which writes the same relative properties.
        var w = res.FindPropertyRelative(k_CamWidthPropName);
        w.intValue = EditorGUI.IntField(widthRect, w.intValue);
        widthRect.y += k_LineHeight;
        var h = res.FindPropertyRelative(k_CamHeightPropName);
        h.intValue = EditorGUI.IntField(heightRect, h.intValue);
        heightRect.y += k_LineHeight;
        var bw = res.FindPropertyRelative(k_CamGrayPropName);
        bw.boolValue = EditorGUI.Toggle(bwRect, bw.boolValue);
        bwRect.y += k_LineHeight;
    }
    EditorGUI.indentLevel--;
}

/// <summary>
/// Draws the buttons to add and remove the visual observations parameters
/// </summary>
/// <param name="position">Rectangle on the screen to use for the property GUI.</param>
/// <param name="resolutions">The SerializedProperty of the resolution array
/// to make the custom GUI for.</param>
private static void DrawVisualObsButtons(Rect position, SerializedProperty resolutions)
{
    var eighth = position.width / 8;
    var addButtonRect = new Rect(position.x + eighth, position.y,
        3 * eighth, position.height);
    var removeButtonRect = new Rect(position.x + 4 * eighth, position.y,
        3 * eighth, position.height);

    // With no entries there is no "Remove Last" button, so let "Add New" fill both slots.
    if (resolutions.arraySize == 0)
    {
        addButtonRect.width *= 2;
    }

    if (GUI.Button(addButtonRect, "Add New", EditorStyles.miniButton))
    {
        resolutions.arraySize += 1;
        // Initialize the freshly added resolution with the default camera settings.
        var newRes = resolutions.GetArrayElementAtIndex(resolutions.arraySize - 1);
        newRes.FindPropertyRelative(k_CamWidthPropName).intValue = k_DefaultCameraWidth;
        newRes.FindPropertyRelative(k_CamHeightPropName).intValue = k_DefaultCameraHeight;
        newRes.FindPropertyRelative(k_CamGrayPropName).boolValue = k_DefaultCameraGray;
    }

    // Only draw (and handle) the remove button when there is something to remove;
    // short-circuiting keeps GUI.Button from being drawn when the array is empty.
    if (resolutions.arraySize > 0 &&
        GUI.Button(removeButtonRect, "Remove Last", EditorStyles.miniButton))
    {
        resolutions.arraySize -= 1;
    }
}

/// <summary>
/// The Height required to draw the Visual Observations parameters
/// </summary>
/// <param name="property">The SerializedProperty of the BrainParameters
/// being drawn.</param>
/// <returns>The height of the drawer of the Visual Observations </returns>
private static float GetHeightDrawVisualObservation(SerializedProperty property)
{
    // Look the resolutions array up once instead of twice.
    var numResolutions = property.FindPropertyRelative(k_CamResPropName).arraySize;

    // Two fixed lines (section label + add/remove buttons) plus one line per resolution.
    var visObsSize = numResolutions + 2;
    if (numResolutions > 0)
    {
        // One extra line for the Index/Width/Height/Gray column headers.
        visObsSize += 1;
    }
    return k_LineHeight * visObsSize;
}

/// <summary>
/// Draws the Vector Actions parameters for the Brain Parameters
/// </summary>
Expand Down
30 changes: 0 additions & 30 deletions UnitySDK/Assets/ML-Agents/Editor/DemonstrationDrawer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -59,33 +59,6 @@ static string BuildActionArrayLabel(SerializedProperty actionSizeProperty)
return actionLabel.ToString();
}

/// <summary>
/// Constructs complex label for each CameraResolution object.
/// An example of this could be `[ 84 X 84 ]`
/// for a single camera with 84 pixels height and width.
/// </summary>
private static string BuildCameraResolutionLabel(SerializedProperty cameraArray)
{
    var count = cameraArray.arraySize;
    var label = new StringBuilder("[ ");
    for (var index = 0; index < count; index++)
    {
        var element = cameraArray.GetArrayElementAtIndex(index);
        var height = element.FindPropertyRelative("height").intValue;
        var width = element.FindPropertyRelative("width").intValue;
        // Height comes first, matching the `height X width` display format.
        label.Append(height).Append(" X ").Append(width);
        if (index < count - 1)
        {
            label.Append(", ");
        }
    }
    return label.Append(" ]").ToString();
}

/// <summary>
/// Renders Inspector UI for Brain Parameters of Demonstration.
Expand All @@ -95,21 +68,18 @@ void MakeBrainParametersProperty(SerializedProperty property)
var vecObsSizeProp = property.FindPropertyRelative("vectorObservationSize");
var numStackedProp = property.FindPropertyRelative("numStackedVectorObservations");
var actSizeProperty = property.FindPropertyRelative("vectorActionSize");
var camResProp = property.FindPropertyRelative("cameraResolutions");
var actSpaceTypeProp = property.FindPropertyRelative("vectorActionSpaceType");

var vecObsSizeLabel = vecObsSizeProp.displayName + ": " + vecObsSizeProp.intValue;
var numStackedLabel = numStackedProp.displayName + ": " + numStackedProp.intValue;
var vecActSizeLabel =
actSizeProperty.displayName + ": " + BuildActionArrayLabel(actSizeProperty);
var camResLabel = camResProp.displayName + ": " + BuildCameraResolutionLabel(camResProp);
var actSpaceTypeLabel = actSpaceTypeProp.displayName + ": " +
(SpaceType)actSpaceTypeProp.enumValueIndex;

EditorGUILayout.LabelField(vecObsSizeLabel);
EditorGUILayout.LabelField(numStackedLabel);
EditorGUILayout.LabelField(vecActSizeLabel);
EditorGUILayout.LabelField(camResLabel);
EditorGUILayout.LabelField(actSpaceTypeLabel);
}

Expand Down
2 changes: 0 additions & 2 deletions UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ public void TestStoreInitalize()
{
vectorObservationSize = 3,
numStackedVectorObservations = 2,
cameraResolutions = new[] {new Resolution()},
vectorActionDescriptions = new[] {"TestActionA", "TestActionB"},
vectorActionSize = new[] {2, 2},
vectorActionSpaceType = SpaceType.Discrete
Expand All @@ -47,7 +46,6 @@ public void TestStoreInitalize()
var agentInfo = new AgentInfo
{
reward = 1f,
visualObservations = new List<Texture2D>(),
actionMasks = new[] {false, true},
done = true,
id = 5,
Expand Down
48 changes: 44 additions & 4 deletions UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
using UnityEngine;
using NUnit.Framework;
using System.Reflection;
using MLAgents.Sensor;
using MLAgents.InferenceBrain;

namespace MLAgents.Tests
{
Expand Down Expand Up @@ -33,6 +35,13 @@ public class TestAgent : Agent
public override void InitializeAgent()
{
    initializeAgentCalls += 1;

    // Register two custom sensors deliberately out of name order so the
    // tests can confirm the agent sorts its sensors by name.
    var firstByName = new TestSensor("testsensor1");
    var secondByName = new TestSensor("testsensor2");

    m_Sensors.Add(secondByName);
    m_Sensors.Add(firstByName);
}

public override void CollectObservations()
Expand All @@ -57,6 +66,37 @@ public override void AgentOnDone()
}
}

/// <summary>
/// Minimal ISensor implementation for the edit-mode tests: reports a
/// single-element float observation shape, produces no compressed data,
/// and exposes the name it was constructed with (used to verify sorting).
/// </summary>
public class TestSensor : ISensor
{
    public string sensorName;

    public TestSensor(string n)
    {
        sensorName = n;
    }

    public int[] GetFloatObservationShape() => new[] { 1 };

    // Intentionally a no-op: the tests never inspect the written tensor.
    public void WriteToTensor(TensorProxy tensorProxy, int agentIndex) { }

    public byte[] GetCompressedObservation() => null;

    public CompressionType GetCompressionType() => CompressionType.None;

    public string GetName() => sensorName;
}

// This is an empty class for testing the behavior of agents and academy
// It is left empty because we are not testing any brain behavior
public class TestBrain : Brain
Expand Down Expand Up @@ -177,6 +217,10 @@ public void TestAgent()
Assert.AreEqual(1, agent2.initializeAgentCalls);
Assert.AreEqual(0, agent1.agentActionCalls);
Assert.AreEqual(0, agent2.agentActionCalls);

// Make sure the sensors were sorted
Assert.AreEqual(agent1.m_Sensors[0].GetName(), "testsensor1");
Assert.AreEqual(agent1.m_Sensors[1].GetName(), "testsensor2");
}
}

Expand Down Expand Up @@ -244,7 +288,6 @@ public void TestAgent()
agent2.agentParameters.onDemandDecision = true;
// agent2 will request decisions only when RequestDecision is called
brain.brainParameters.vectorObservationSize = 0;
brain.brainParameters.cameraResolutions = new Resolution[0];
agent1.GiveBrain(brain);
agent2.GiveBrain(brain);

Expand Down Expand Up @@ -370,7 +413,6 @@ public void TestAgent()
agent2.agentParameters.onDemandDecision = true;
// agent2 will request decisions only when RequestDecision is called
brain.brainParameters.vectorObservationSize = 0;
brain.brainParameters.cameraResolutions = new Resolution[0];
agent1.GiveBrain(brain);
agent2.GiveBrain(brain);

Expand Down Expand Up @@ -487,7 +529,6 @@ public void TestResetOnDone()
agent1.agentParameters.resetOnDone = false;
agent2.agentParameters.resetOnDone = false;
brain.brainParameters.vectorObservationSize = 0;
brain.brainParameters.cameraResolutions = new Resolution[0];
agent1.GiveBrain(brain);
agent2.GiveBrain(brain);

Expand Down Expand Up @@ -567,7 +608,6 @@ public void TestCumulativeReward()
// agent2 will request decisions only when RequestDecision is called
agent1.agentParameters.maxStep = 20;
brain.brainParameters.vectorObservationSize = 0;
brain.brainParameters.cameraResolutions = new Resolution[0];
agent1.GiveBrain(brain);
agent2.GiveBrain(brain);

Expand Down
Loading