diff --git a/docs/Getting-Started-with-Balance-Ball.md b/docs/Getting-Started-with-Balance-Ball.md
index 5b18726a32..9ccb34a51c 100644
--- a/docs/Getting-Started-with-Balance-Ball.md
+++ b/docs/Getting-Started-with-Balance-Ball.md
@@ -269,7 +269,7 @@ on the same graph.
 To summarize, go to your command line, enter the `ml-agents` directory and type:
 
-```python
+```
 python3 python/learn.py --run-id=<run-identifier> --train
 ```
 
 **Note**: If you're using Anaconda, don't forget to activate the ml-agents environment first.
diff --git a/docs/Learning-Environment-Examples.md b/docs/Learning-Environment-Examples.md
index ecb55318eb..ece6fb4932 100644
--- a/docs/Learning-Environment-Examples.md
+++ b/docs/Learning-Environment-Examples.md
@@ -154,15 +154,15 @@ If you would like to contribute environments, please see our
 ![Banana](images/banana.png)
 
 * Set-up: A multi-agent environment where agents compete to collect bananas.
-* Goal: The agents must learn to move to as many yellow bananas as possible while avoiding red bananas.
-* Agents: The environment contains 10 agents linked to a single brain.
+* Goal: The agents must learn to move to as many yellow bananas as possible while avoiding blue bananas.
+* Agents: The environment contains 5 agents linked to a single brain.
 * Agent Reward Function (independent):
   * +1 for interaction with yellow banana
-  * -1 for interaction with red banana.
+  * -1 for interaction with blue banana.
 * Brains: One brain with the following observation/action space.
-  * Vector Observation space: (Continuous) 51 corresponding to velocity of agent, plus ray-based perception of objects around agent's forward direction.
+  * Vector Observation space: (Continuous) 53 corresponding to velocity of agent (2), whether agent is frozen and/or shot its laser (2), plus ray-based perception of objects around agent's forward direction (49; 7 raycast angles with 7 measurements for each).
   * Vector Action space: (Continuous) Size of 3, corresponding to forward movement, y-axis rotation, and whether to use laser to disable other agents.
-  * Visual Observations (Optional): First-person view for each agent.
+  * Visual Observations (Optional; None by default): First-person view for each agent.
 * Reset Parameters: None
 
 ## Hallway
diff --git a/unity-environment/Assets/ML-Agents/Examples/BananaCollectors/BananaRL.unity b/unity-environment/Assets/ML-Agents/Examples/BananaCollectors/BananaRL.unity
index 2e49e62fd4..4636cea625 100644
--- a/unity-environment/Assets/ML-Agents/Examples/BananaCollectors/BananaRL.unity
+++ b/unity-environment/Assets/ML-Agents/Examples/BananaCollectors/BananaRL.unity
@@ -13,7 +13,7 @@ OcclusionCullingSettings:
 --- !u!104 &2
 RenderSettings:
   m_ObjectHideFlags: 0
-  serializedVersion: 9
+  serializedVersion: 8
   m_Fog: 0
   m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1}
   m_FogMode: 3
@@ -39,12 +39,11 @@ RenderSettings:
   m_CustomReflection: {fileID: 0}
   m_Sun: {fileID: 0}
   m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1}
-  m_UseRadianceAmbientProbe: 0
 --- !u!157 &3
 LightmapSettings:
   m_ObjectHideFlags: 0
   serializedVersion: 11
-  m_GIWorkflowMode: 0
+  m_GIWorkflowMode: 1
   m_GISettings:
     serializedVersion: 2
     m_BounceScale: 1
@@ -55,10 +54,11 @@ LightmapSettings:
   m_EnableBakedLightmaps: 1
   m_EnableRealtimeLightmaps: 1
   m_LightmapEditorSettings:
-    serializedVersion: 10
+    serializedVersion: 9
     m_Resolution: 2
     m_BakeResolution: 40
-    m_AtlasSize: 1024
+    m_TextureWidth: 1024
+    m_TextureHeight: 1024
     m_AO: 1
     m_AOMaxDistance: 1
     m_CompAOExponent: 1
@@ -678,8 +678,13 @@ Prefab:
       objectReference: {fileID: 0}
     - target: {fileID: 1819751139121548, guid: 38400a68c4ea54b52998e34ee238d1a7, type: 2}
       propertyPath: m_IsActive
-      value: 0
+      value: 1
       objectReference: {fileID: 0}
+    - target: {fileID: 114508049814297234, guid: 38400a68c4ea54b52998e34ee238d1a7,
+        type: 2}
+      propertyPath: myAcademyObj
+      value: 
+      objectReference: {fileID: 1574236047}
   m_RemovedComponents: []
   m_ParentPrefab: {fileID: 100100000, guid: 38400a68c4ea54b52998e34ee238d1a7, type: 2}
   m_IsPrefabParent: 0
@@ -776,8 +781,13 @@ Prefab:
       objectReference: {fileID: 0}
     - target: {fileID: 1819751139121548, guid: 38400a68c4ea54b52998e34ee238d1a7, type: 2}
       propertyPath: m_IsActive
-      value: 0
+      value: 1
       objectReference: {fileID: 0}
+    - target: {fileID: 114508049814297234, guid: 38400a68c4ea54b52998e34ee238d1a7,
+        type: 2}
+      propertyPath: myAcademyObj
+      value: 
+      objectReference: {fileID: 1574236047}
   m_RemovedComponents: []
   m_ParentPrefab: {fileID: 100100000, guid: 38400a68c4ea54b52998e34ee238d1a7, type: 2}
   m_IsPrefabParent: 0
@@ -841,7 +851,6 @@ Camera:
   m_TargetEye: 3
   m_HDR: 1
   m_AllowMSAA: 1
-  m_AllowDynamicResolution: 0
   m_ForceIntoRT: 1
   m_OcclusionCulling: 1
   m_StereoConvergence: 10
@@ -1204,8 +1213,13 @@ Prefab:
       objectReference: {fileID: 0}
     - target: {fileID: 1819751139121548, guid: 38400a68c4ea54b52998e34ee238d1a7, type: 2}
       propertyPath: m_IsActive
-      value: 0
+      value: 1
       objectReference: {fileID: 0}
+    - target: {fileID: 114508049814297234, guid: 38400a68c4ea54b52998e34ee238d1a7,
+        type: 2}
+      propertyPath: myAcademyObj
+      value: 
+      objectReference: {fileID: 1574236047}
   m_RemovedComponents: []
   m_ParentPrefab: {fileID: 100100000, guid: 38400a68c4ea54b52998e34ee238d1a7, type: 2}
   m_IsPrefabParent: 0
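For reference, the revised Vector Observation description in the `Learning-Environment-Examples.md` hunk above decomposes as 2 (agent velocity) + 2 (frozen / fired-laser flags) + 49 (7 ray angles × 7 measurements per ray) = 53. The sketch below is a minimal NumPy illustration of that layout only; the function and argument names (`build_observation`, `ray_perception`) are assumptions for illustration, not part of the ML-Agents codebase.

```python
import numpy as np

# Observation layout described in the docs change:
# 53 = 2 (velocity) + 2 (frozen / shot-laser flags) + 49 (7 ray angles x 7 measurements).
NUM_RAY_ANGLES = 7
MEASUREMENTS_PER_RAY = 7


def build_observation(velocity, frozen, shot_laser, ray_perception):
    """Concatenate the documented pieces into one 53-dimensional vector (illustrative only)."""
    velocity = np.asarray(velocity, dtype=np.float32)            # (2,) agent velocity
    flags = np.array([float(frozen), float(shot_laser)],
                     dtype=np.float32)                           # (2,) status flags
    rays = np.asarray(ray_perception, dtype=np.float32).ravel()  # (49,) 7 angles x 7 values

    assert velocity.shape == (2,)
    assert rays.shape == (NUM_RAY_ANGLES * MEASUREMENTS_PER_RAY,)

    obs = np.concatenate([velocity, flags, rays])
    assert obs.shape == (53,)  # matches the documented Vector Observation size
    return obs


# Example with dummy values:
obs = build_observation(
    velocity=[0.1, -0.3],
    frozen=False,
    shot_laser=True,
    ray_perception=np.zeros((NUM_RAY_ANGLES, MEASUREMENTS_PER_RAY)),
)
print(obs.shape)  # (53,)
```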