From f35f9182ca59ff24c865ba26675177914adf4cfe Mon Sep 17 00:00:00 2001
From: Andrey Gumirov
Date: Wed, 13 Apr 2022 14:12:40 +0700
Subject: [PATCH 01/27] Added debug log and a few todos

---
 Assets/ML-Agents/Timers/Greatest_map_ever_timers.json | 2 +-
 Assets/Scripts/Character/MovementController.cs | 2 +-
 Assets/Scripts/Character/NPC.cs | 1 +
 ProjectSettings/ProjectVersion.txt | 4 ++--
 4 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
index 259eaa5..dee8d64 100755
--- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
+++ b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
@@ -1 +1 @@
-{"count":1,"self":27.008124799999997,"total":28.0164227,"children":{"InitializeActuators":{"count":1,"self":0.0030001,"total":0.0030001,"children":null},"InitializeSensors":{"count":1,"self":0.0020001,"total":0.0020001,"children":null},"AgentSendState":{"count":1148,"self":0.0250195,"total":0.40993029999999997,"children":{"CollectObservations":{"count":1148,"self":0.3739037,"total":0.3739037,"children":null},"WriteActionMask":{"count":1148,"self":0.0050046,"total":0.0050046,"children":null},"RequestDecision":{"count":1148,"self":0.0060025,"total":0.0060025,"children":null}}},"DecideAction":{"count":1148,"self":0.33184349999999996,"total":0.33184349999999996,"children":null},"AgentAct":{"count":1148,"self":0.2595281,"total":0.2595281,"children":null}},"gauges":{"NPC.CumulativeReward":{"count":11,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649822158","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-Leonid_Krazer -hubSessionId dca9b0c0-ba6e-11ec-ba13-6d2f32043678 -accessToken h2wulrZC-_wNB02rFLc8VoUWrSoRJ5zUGCjS9AOIxNY009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1649822186"}}
\ No newline at end of file
+{"count":1,"self":9.6690519999999989,"total":9.830432,"children":{"InitializeActuators":{"count":1,"self":0.0072819999999999994,"total":0.0072819999999999994,"children":null},"InitializeSensors":{"count":1,"self":0.0050279999999999995,"total":0.0050279999999999995,"children":null},"AgentSendState":{"count":291,"self":0.015906,"total":0.12215899999999999,"children":{"CollectObservations":{"count":291,"self":0.094488,"total":0.094488,"children":null},"WriteActionMask":{"count":291,"self":0.003096,"total":0.003096,"children":null},"RequestDecision":{"count":291,"self":0.008669,"total":0.008669,"children":null}}},"DecideAction":{"count":291,"self":0.010034999999999999,"total":0.010034999999999999,"children":null},"AgentAct":{"count":291,"self":0.01038,"total":0.01038,"children":null}},"gauges":{"NPC.CumulativeReward":{"count":2,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649833912","unity_version":"2019.4.36f1","command_line_arguments":"\/Applications\/Unity\/Hub\/Editor\/2019.4.36f1\/Unity.app\/Contents\/MacOS\/Unity -projectpath \/Users\/gav\/tmp\/projects\/real_shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-gav -hubSessionId 00bae3b0-baf3-11ec-825c-d33a32cd27c6 -accessToken lsL7HWaJbQqgjzAcs45yqgSpYnYbj_1WIJQNy6b-hEg009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1649833921"}}
\ No newline at end of file
diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs
index e45bf60..9fffed3 100755
--- a/Assets/Scripts/Character/MovementController.cs
+++ b/Assets/Scripts/Character/MovementController.cs
@@ -26,7 +26,7 @@ public class MovementController : MonoBehaviour
         goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]);
     }
 
-    private NavPoint getPointCandidate()
+    private NavPoint getPointCandidate() // todo: this should return an array of points
     {
         var NavPointsPositions = MapManager.navPoints
             .Select(point => point.transform.position)
diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs
index 776c42f..f474558 100755
--- a/Assets/Scripts/Character/NPC.cs
+++ b/Assets/Scripts/Character/NPC.cs
@@ -32,6 +32,7 @@ public class NPC : Agent
         sensor.AddObservation(Condition.ArmourPoints);
         sensor.AddObservation(Condition.Ammunition);
         sensor.AddObservation((int)Condition.npcState);
+        Debug.Log("Sensors: " + sensor);
     }
 
     public override void Heuristic(in ActionBuffers actionsOut)
diff --git a/ProjectSettings/ProjectVersion.txt b/ProjectSettings/ProjectVersion.txt
index 89a11d7..bf8f7eb 100755
--- a/ProjectSettings/ProjectVersion.txt
+++ b/ProjectSettings/ProjectVersion.txt
@@ -1,2 +1,2 @@
-m_EditorVersion: 2019.4.35f1
-m_EditorVersionWithRevision: 2019.4.35f1 (0462406dff2e)
+m_EditorVersion: 2019.4.36f1
+m_EditorVersionWithRevision: 2019.4.36f1 (660c164b2fc5)

From 29fec74bd40e168f19400ed1f67de2456bb62308 Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Mon, 18 Apr 2022 09:34:08 +0700
Subject: [PATCH 02/27] latest changes

---
 .../Timers/Greatest_map_ever_timers.json | 2 +-
 Assets/Prefabs/Bot.prefab | 92 +-
 Assets/Prefabs/DragonFucker.prefab | 2744 -----------------
 Assets/Prefabs/DragonFucker.prefab.meta | 7 -
 Assets/Prefabs/Player.prefab | 498 +--
 .../Greatest_map_ever/Greatest_map_ever.meta | 8 -
 .../Greatest_map_ever/Greatest_map_ever.unity | 488 +--
 .../Greatest_map_ever/NavMesh.asset | Bin 11248 -> 0 bytes
 .../Greatest_map_ever/NavMesh.asset.meta | 8 -
 Assets/Scenes/Greatest_map_ever/NavMesh.asset | Bin 9596 -> 11248 bytes
 .../Greatest_map_ever/NavMesh.asset.meta | 2 +-
 Assets/Scenes/tyt player.meta | 8 -
 Assets/Scenes/tyt player/Player.prefab | 510 ---
 Assets/Scenes/tyt player/Player.prefab.meta | 7 -
 Assets/Scripts/Bots/CharacterFactory.cs | 59 +
 ...ooler.cs.meta => CharacterFactory.cs.meta} | 0
 Assets/Scripts/Bots/CharacterPooler.cs | 4 -
 Assets/Scripts/Character/Character.cs | 22 +
 Assets/Scripts/Character/Character.cs.meta | 11 +
 .../Scripts/Character/CharacterCondition.cs | 9 -
 .../Scripts/Character/MovementController.cs | 16 +-
 Assets/Scripts/Character/NPC.cs | 74 +-
 Assets/Scripts/Character/NPC_State.cs | 46 +
 Assets/Scripts/Character/NPC_State.cs.meta | 11 +
 Assets/Scripts/Character/Player.cs | 34 +
 Assets/Scripts/Character/Player.cs.meta | 11 +
 Assets/Scripts/Managers/GameManager.cs | 35 +-
 Assets/Scripts/Managers/MapManager.cs | 6 +-
 Assets/Scripts/Managers/TimeManager.cs | 6 +-
 Assets/Scripts/Misc/NavPoint.cs | 19 +-
 Assets/Scripts/Misc/Settings.cs | 7 +
 Assets/Scripts/Pickups/PickUpSpawner.cs | 8 +
 Assets/Settings/Game Settings.asset | 12 +-
 Docs/api.md | 0
 ProjectSettings/ProjectSettings.asset | 3 +-
 35 files changed, 742 insertions(+), 4025 deletions(-)
 delete mode 100644 Assets/Prefabs/DragonFucker.prefab
 delete mode 100644 Assets/Prefabs/DragonFucker.prefab.meta
 delete mode 100644 Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta
 delete mode 100644 Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset
 delete mode 100644 Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta
 delete mode 100755 Assets/Scenes/tyt player.meta
 delete mode 100755 Assets/Scenes/tyt player/Player.prefab
 delete mode 100755 Assets/Scenes/tyt player/Player.prefab.meta
 create mode 100644 Assets/Scripts/Bots/CharacterFactory.cs
 rename Assets/Scripts/Bots/{CharacterPooler.cs.meta => CharacterFactory.cs.meta} (100%)
 mode change 100755 => 100644
 delete mode 100755 Assets/Scripts/Bots/CharacterPooler.cs
 create mode 100644 Assets/Scripts/Character/Character.cs
 create mode 100644 Assets/Scripts/Character/Character.cs.meta
 create mode 100644 Assets/Scripts/Character/NPC_State.cs
 create mode 100644 Assets/Scripts/Character/NPC_State.cs.meta
 create mode 100644 Assets/Scripts/Character/Player.cs
 create mode 100644 Assets/Scripts/Character/Player.cs.meta
 create mode 100644 Docs/api.md

diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
index 259eaa5..3ed5e46 100755
--- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
+++ b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
@@ -1 +1 @@
-{"count":1,"self":27.008124799999997,"total":28.0164227,"children":{"InitializeActuators":{"count":1,"self":0.0030001,"total":0.0030001,"children":null},"InitializeSensors":{"count":1,"self":0.0020001,"total":0.0020001,"children":null},"AgentSendState":{"count":1148,"self":0.0250195,"total":0.40993029999999997,"children":{"CollectObservations":{"count":1148,"self":0.3739037,"total":0.3739037,"children":null},"WriteActionMask":{"count":1148,"self":0.0050046,"total":0.0050046,"children":null},"RequestDecision":{"count":1148,"self":0.0060025,"total":0.0060025,"children":null}}},"DecideAction":{"count":1148,"self":0.33184349999999996,"total":0.33184349999999996,"children":null},"AgentAct":{"count":1148,"self":0.2595281,"total":0.2595281,"children":null}},"gauges":{"NPC.CumulativeReward":{"count":11,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649822158","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-Leonid_Krazer -hubSessionId dca9b0c0-ba6e-11ec-ba13-6d2f32043678 -accessToken h2wulrZC-_wNB02rFLc8VoUWrSoRJ5zUGCjS9AOIxNY009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1649822186"}}
\ No newline at end of file
+{"count":1,"self":16.677081599999998,"total":16.6850819,"children":{"InitializeActuators":{"count":1,"self":0.0020011,"total":0.0020011,"children":null},"InitializeSensors":{"count":1,"self":0.0019998,"total":0.0019998,"children":null},"AgentSendState":{"count":1,"self":0.0020001,"total":0.0030001,"children":{"CollectObservations":{"count":1,"self":0.001,"total":0.001,"children":null}}}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650121997","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-BipJTP5Uk-fN-2Lss2TUJ -hubSessionId a1fe6f50-bcbd-11ec-b648-df8ff5c97a4c -accessToken K_G64_ace_5LgpX3riGrgGS92ESag4RH04Ti1lswMX8009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650122013"}}
\ No newline at end of file
diff --git a/Assets/Prefabs/Bot.prefab b/Assets/Prefabs/Bot.prefab
index af42c97..c15183f 100755
--- a/Assets/Prefabs/Bot.prefab
+++ b/Assets/Prefabs/Bot.prefab
@@ -13,6 +13,10 @@ GameObject:
   - component: {fileID: 2988578997639256868}
   - component: {fileID: 2988578997639256875}
   - component: {fileID: 5447337162552783061}
+  - component: {fileID: 7805954453358028498}
+  - component: {fileID: 2676446634235362783}
+  - component: {fileID: 8656710265340117963}
+  - component: {fileID: 778652956973742106}
   m_Layer: 0
   m_Name: Bot
   m_TagString: Untagged
@@ -107,7 +111,87 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
-  HealthPoints: 100
-  Armour: 100
-  Ammunition: 360
-  LastTimeHit: 0
+  agentParameters:
+    maxStep: 0
+  hasUpgradedFromAgentParameters: 1
+  MaxStep: 100
+  moveController: {fileID: 778652956973742106}
+--- !u!114 &7805954453358028498
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2988578997639256874}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 3a5c9d521e5ef4759a8246a07d52221e, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  DecisionPeriod: 1
+  TakeActionsBetweenDecisions: 1
+--- !u!114 &2676446634235362783
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2988578997639256874}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_BrainParameters:
+    VectorObservationSize: 1
+    NumStackedVectorObservations: 1
+    m_ActionSpec:
+      m_NumContinuousActions: 0
+      BranchSizes: 01000000
+    VectorActionSize: 01000000
+    VectorActionDescriptions: []
+    VectorActionSpaceType: 0
+    hasUpgradedBrainParametersWithActionSpec: 1
+  m_Model: {fileID: 0}
+  m_InferenceDevice: 0
+  m_BehaviorType: 0
+  m_BehaviorName: My Behavior
+  TeamId: 0
+  m_UseChildSensors: 1
+  m_UseChildActuators: 1
+  m_ObservableAttributeHandling: 0
+--- !u!195 &8656710265340117963
+NavMeshAgent:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2988578997639256874}
+  m_Enabled: 1
+  m_AgentTypeID: 0
+  m_Radius: 0.5
+  m_Speed: 3.5
+  m_Acceleration: 8
+  avoidancePriority: 50
+  m_AngularSpeed: 120
+  m_StoppingDistance: 0
+  m_AutoTraverseOffMeshLink: 1
+  m_AutoBraking: 1
+  m_AutoRepath: 1
+  m_Height: 2
+  m_BaseOffset: 1
+  m_WalkableMask: 4294967295
+  m_ObstacleAvoidanceType: 4
+--- !u!114 &778652956973742106
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 2988578997639256874}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  navMeshAgent: {fileID: 8656710265340117963}
diff --git a/Assets/Prefabs/DragonFucker.prefab b/Assets/Prefabs/DragonFucker.prefab
deleted file mode 100644
index bc71ab1..0000000
--- a/Assets/Prefabs/DragonFucker.prefab
+++ /dev/null
@@ -1,2744 +0,0 @@
-%YAML 1.1
-%TAG !u! tag:unity3d.com,2011:
[2,742 further deleted lines of auto-generated Unity prefab YAML follow here: the DragonFucker character's GameObject/Transform bone hierarchy (hips, shoulders, fingers, headgear, shoulder pads), SkinnedMeshRenderers with full bone lists, Animator, MeshCollider/CapsuleCollider, and serialized PlayerController fields; the excerpt breaks off mid-file.]
{fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054797 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342573} - m_LocalRotation: {x: -0.002378591, y: 0.08787313, z: 0.02695381, w: 0.99576414} - m_LocalPosition: {x: -0.56803536, y: 46.34834, z: 10.906936} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054789} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342575 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054799} - m_Layer: 0 - m_Name: ArmPlacement_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054799 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342575} - m_LocalRotation: {x: -0.02695381, y: 0.99576414, z: -0.0023785909, w: -0.08787313} - m_LocalPosition: {x: 41.471436, y: 4.6548405, z: -15.306103} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054789} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342577 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054801} - m_Layer: 0 - m_Name: Hand_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054801 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342577} - m_LocalRotation: {x: 0.0005932963, y: -0.08528753, z: -0.006930911, w: 0.9963321} - m_LocalPosition: {x: -37.441498, y: 8.526513e-14, z: 2.7533531e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054753} - m_Father: {fileID: 3344194988163054845} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342579 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054803} - m_Layer: 0 - m_Name: Foot_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054803 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342579} - m_LocalRotation: {x: 0.81892335, y: 0.5656828, z: 0.024710972, w: -0.09357782} - m_LocalPosition: {x: -62.73464, y: -9.769963e-15, z: 2.4868996e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054779} - m_Father: {fileID: 3344194988163054839} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342581 -GameObject: - m_ObjectHideFlags: 0 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054805} - m_Layer: 0 - m_Name: Head - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054805 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342581} - m_LocalRotation: {x: 0.071218304, y: -0.028954746, z: -0.059960153, w: 0.99523586} - m_LocalPosition: {x: -13.255574, y: -1.4210855e-14, z: 7.364747e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - m_Father: {fileID: 3344194988163054731} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342583 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054807} - m_Layer: 0 - m_Name: Hand_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054807 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342583} - m_LocalRotation: {x: -0.0005926622, y: 0.085287675, z: -0.0069382307, w: 0.99633205} - m_LocalPosition: {x: -37.441414, y: 1.7053026e-13, z: -5.3290705e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054759} - m_Father: {fileID: 3344194988163054835} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342585 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054809} - - component: {fileID: 3344194988174913005} - m_Layer: 0 - m_Name: Body1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054809 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342585} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0.0000018693923, y: -129.49721, z: -6.0212374} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174913005 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342585} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - 
m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300002, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -31.982391, y: 2.5145874, z: 0.0000019073486} - m_Extent: {x: 51.12609, y: 46.604404, z: 27.749142} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342587 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054811} - - component: {fileID: 3344194988174913007} - m_Layer: 0 - m_Name: Backpack1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054811 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342587} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -138.49533, z: 34.48629} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: 
{fileID: 3344194988163054729} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174913007 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342587} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300004, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -28.89946, y: 44.769905, z: -1.5718498} - m_Extent: {x: 47.82257, y: 32.29384, z: 42.822285} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342589 -GameObject: - m_ObjectHideFlags: 0 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054813} - m_Layer: 0 - m_Name: Foot_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054813 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342589} - m_LocalRotation: {x: 0.8193084, y: 0.56578285, z: -0.02233458, w: 0.09013736} - m_LocalPosition: {x: -62.734695, y: 2.842171e-14, z: -1.7763568e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054757} - m_Father: {fileID: 3344194988163054833} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342591 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054815} - m_Layer: 0 - m_Name: Chest - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054815 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342591} - m_LocalRotation: {x: 3.1378165e-33, y: -5.551116e-17, z: 0.18862787, w: 0.9820487} - m_LocalPosition: {x: -24.042006, y: -1.0658141e-14, z: 1.9451509e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054745} - m_Father: {fileID: 3344194988163054763} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342657 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054753} - m_Layer: 0 - m_Name: Thumb_Proximal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054753 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342657} - m_LocalRotation: {x: 0.057522308, y: 0.49499637, z: 0.112048574, w: 0.8597179} - m_LocalPosition: {x: -0.2922163, y: -0.5636321, z: 12.295864} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054765} - m_Father: {fileID: 3344194988163054801} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342659 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054755} - m_Layer: 0 - m_Name: Thumb_Intermediate_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054755 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342659} - m_LocalRotation: {x: 0.048999686, y: 
0.1354733, z: 0.1004816, w: 0.9844539} - m_LocalPosition: {x: -9.836, y: -5.684342e-14, z: -5.684342e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054767} - m_Father: {fileID: 3344194988163054759} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342661 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054757} - m_Layer: 0 - m_Name: Toe_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054757 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342661} - m_LocalRotation: {x: 0.0013659683, y: 0.0051824837, z: -0.24785845, w: 0.9687815} - m_LocalPosition: {x: -13.771131, y: 1.9539925e-14, z: -1.0658141e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054777} - m_Father: {fileID: 3344194988163054813} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342663 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054759} - m_Layer: 0 - m_Name: Thumb_Proximal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054759 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342663} - m_LocalRotation: {x: -0.057529792, y: -0.49499094, z: 0.11207554, w: 0.859717} - m_LocalPosition: {x: -0.29218963, y: -0.56312394, z: -12.29586} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054755} - m_Father: {fileID: 3344194988163054807} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342665 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054761} - m_Layer: 0 - m_Name: Thumb_Distal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054761 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342665} - m_LocalRotation: {x: -0.017883137, y: -0.03974522, z: -0.025908269, w: 0.99871385} - m_LocalPosition: {x: -8.816608, y: 0, z: 1.4210855e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054765} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342667 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054763} - m_Layer: 0 - m_Name: Spine - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054763 -Transform: - 
m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342667} - m_LocalRotation: {x: 0.99452215, y: -0.104526356, z: -7.7878193e-10, w: 0.0000000074097675} - m_LocalPosition: {x: -21.090727, y: -8.881784e-15, z: -4.4013775e-16} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054815} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342669 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054765} - m_Layer: 0 - m_Name: Thumb_Intermediate_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054765 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342669} - m_LocalRotation: {x: -0.04900198, y: -0.13549508, z: 0.10049181, w: 0.98444974} - m_LocalPosition: {x: -9.835772, y: -1.1368684e-13, z: -2.842171e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054761} - m_Father: {fileID: 3344194988163054753} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342671 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054767} - m_Layer: 0 - m_Name: Thumb_Distal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054767 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342671} - m_LocalRotation: {x: 0.0017341404, y: 0.008320127, z: -0.005799853, w: 0.9999471} - m_LocalPosition: {x: -8.816269, y: 0, z: -1.4210855e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054755} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342673 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054769} - m_Layer: 0 - m_Name: UpperArm_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054769 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342673} - m_LocalRotation: {x: 0.029697837, y: 0.15896995, z: -0.18122736, w: 0.97005326} - m_LocalPosition: {x: -15.111769, y: 0, z: -3.1974423e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054835} - m_Father: {fileID: 3344194988163054745} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342675 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - 
m_Component: - - component: {fileID: 3344194988163054771} - m_Layer: 0 - m_Name: UpperArm_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054771 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342675} - m_LocalRotation: {x: -0.029700447, y: -0.15896967, z: -0.18123563, w: 0.97005165} - m_LocalPosition: {x: -15.111847, y: 0, z: -3.7303494e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054845} - m_Father: {fileID: 3344194988163054747} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342677 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054773} - m_Layer: 0 - m_Name: UpperLeg_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054773 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342677} - m_LocalRotation: {x: 0.03061261, y: 0.6954094, z: 0.7139454, w: 0.07583304} - m_LocalPosition: {x: 0.084786385, y: 0.40212917, z: 18.8642} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054839} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342679 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054775} - m_Layer: 0 - m_Name: UpperLeg_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054775 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342679} - m_LocalRotation: {x: -0.030611672, y: -0.6954005, z: 0.7139541, w: 0.07583354} - m_LocalPosition: {x: 0.08523814, y: 0.40205857, z: -18.864191} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054833} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342681 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054777} - m_Layer: 0 - m_Name: Toetip_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054777 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342681} - m_LocalRotation: {x: 0.00016529544, y: -0.016936114, z: -0.0047631934, w: 0.99984527} - m_LocalPosition: {x: -17.883871, y: 3.5527137e-15, z: 2.1316282e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054757} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, 
y: 0, z: 0} ---- !u!1 &3344194988163342683 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054779} - m_Layer: 0 - m_Name: Toe_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054779 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342683} - m_LocalRotation: {x: -0.0013655907, y: -0.005181019, z: -0.24785995, w: 0.9687811} - m_LocalPosition: {x: -13.771146, y: 2.1316282e-14, z: 1.0658141e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054783} - m_Father: {fileID: 3344194988163054803} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342685 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054781} - m_Layer: 0 - m_Name: Trigger_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054781 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342685} - m_LocalRotation: {x: -0.0031178175, y: 0.0071248533, z: 0.40088162, w: 0.91609687} - m_LocalPosition: {x: -9.913989, y: -10.731702, z: 9.284221} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054785} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342687 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054783} - m_Layer: 0 - m_Name: Toetip_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054783 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342687} - m_LocalRotation: {x: -0.00028878966, y: 0.016934738, z: -0.012053749, w: 0.99978393} - m_LocalPosition: {x: -17.883856, y: -2.6645353e-15, z: -1.4210855e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054779} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} diff --git a/Assets/Prefabs/DragonFucker.prefab.meta b/Assets/Prefabs/DragonFucker.prefab.meta deleted file mode 100644 index 1e8b307..0000000 --- a/Assets/Prefabs/DragonFucker.prefab.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: c886079c5bf3e67408d356ea1a932c5f -PrefabImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/Assets/Prefabs/Player.prefab b/Assets/Prefabs/Player.prefab index 7dd28e6..c798d61 100755 --- a/Assets/Prefabs/Player.prefab +++ b/Assets/Prefabs/Player.prefab @@ -1,6 +1,6 @@ %YAML 1.1 %TAG !u! 
tag:unity3d.com,2011: ---- !u!1 &4528203470433968325 +--- !u!1 &5245491127989480125 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -8,101 +8,15 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 4528203470433968376} - - component: {fileID: 4528203470433968327} - - component: {fileID: 4528203470433968326} - m_Layer: 8 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4528203470433968376 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470433968325} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.734, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4528203471293941515} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &4528203470433968327 -Camera: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470433968325} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!81 &4528203470433968326 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470433968325} - m_Enabled: 1 ---- !u!1 &4528203470625763699 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 4528203470625763689} - - component: {fileID: 4528203470625763688} - - component: {fileID: -4942972567661207728} - - component: {fileID: 4528203470625763702} - - component: {fileID: 4528203470625763690} - - component: {fileID: 4528203470625763703} - - component: {fileID: 4528203470625763701} - - component: {fileID: 4528203470625763700} - - component: {fileID: 1061105263471521090} - - component: {fileID: 1809549200} - - component: {fileID: 1809549201} - - component: {fileID: 1809549212} + - component: {fileID: 5245491127989480103} + - component: {fileID: 5245491127989480102} + - component: {fileID: 5583297852527723678} + - component: {fileID: 5245491127989480120} + - component: {fileID: 5245491127989480100} + - component: {fileID: 5245491127989480121} + - component: {fileID: 5245491127989480123} + - component: {fileID: 5245491127989480122} + - component: {fileID: 4890899368932544690} m_Layer: 8 m_Name: Player m_TagString: Defender @@ -110,37 +24,37 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4528203470625763689 +--- !u!4 &5245491127989480103 Transform: 
m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 28.21, y: 14.12, z: 48.395} + m_GameObject: {fileID: 5245491127989480125} + m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} + m_LocalPosition: {x: 28.21, y: 10.9, z: 46.67} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4528203471293941515} - - {fileID: 4528203471164033737} + - {fileID: 5245491129196666053} + - {fileID: 5245491129603592455} m_Father: {fileID: 0} m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!33 &4528203470625763688 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!33 &5245491127989480102 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 &-4942972567661207728 +--- !u!23 &5583297852527723678 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -173,13 +87,13 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!136 &4528203470625763702 +--- !u!136 &5245491127989480120 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 @@ -187,13 +101,13 @@ CapsuleCollider: m_Height: 2 m_Direction: 1 m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &4528203470625763690 +--- !u!136 &5245491127989480100 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 @@ -201,13 +115,13 @@ CapsuleCollider: m_Height: 1.3 m_Direction: 1 m_Center: {x: 0, y: -0.35, z: 0} ---- !u!136 &4528203470625763703 +--- !u!136 &5245491127989480121 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 @@ -215,16 +129,16 @@ CapsuleCollider: m_Height: 0.8 m_Direction: 1 m_Center: {x: 0, y: -0.6, z: 0} ---- !u!143 &4528203470625763701 +--- !u!143 &5245491127989480123 CharacterController: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 0 + m_Enabled: 1 serializedVersion: 2 m_Height: 2 m_Radius: 0.5 @@ -233,21 +147,21 @@ CharacterController: m_SkinWidth: 0.08 m_MinMoveDistance: 0.001 m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &4528203470625763700 +--- !u!114 &5245491127989480122 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} 
m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 0 + m_GameObject: {fileID: 5245491127989480125} + m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} m_Name: m_EditorClassIdentifier: input_View: {x: 0, y: 0} - cameraHolder: {fileID: 4528203471293941515} - feetTransform: {fileID: 4528203471164033737} + cameraHolder: {fileID: 5245491129196666053} + feetTransform: {fileID: 5245491129603592455} playerSettings: ViewXSensetivity: 20 ViewYSensetivity: 20 @@ -279,98 +193,27 @@ MonoBehaviour: playerStanceSmoothing: 0.2 playerStandStance: CameraHeight: 0.7 - StanceCollider: {fileID: 4528203470625763702} + StanceCollider: {fileID: 5245491127989480120} playerCrouchStance: CameraHeight: 0.3 - StanceCollider: {fileID: 4528203470625763690} + StanceCollider: {fileID: 5245491127989480100} playerProneStance: CameraHeight: -0.58 - StanceCollider: {fileID: 4528203470625763703} - currentWeapon: {fileID: 0} ---- !u!114 &1061105263471521090 + StanceCollider: {fileID: 5245491127989480121} + currentWeapon: {fileID: 8510909888689775086} +--- !u!114 &4890899368932544690 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} + m_Script: {fileID: 11500000, guid: a8c9a8e604d395c4ab9d03d28adc4982, type: 3} m_Name: m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 10 - moveController: {fileID: 1809549200} ---- !u!114 &1809549200 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} - m_Name: - m_EditorClassIdentifier: - mapManager: {fileID: 0} - navMeshAgent: {fileID: 1809549201} ---- !u!195 &1809549201 -NavMeshAgent: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 1 - m_AgentTypeID: 0 - m_Radius: 0.5 - m_Speed: 3.5 - m_Acceleration: 8 - avoidancePriority: 50 - m_AngularSpeed: 120 - m_StoppingDistance: 0 - m_AutoTraverseOffMeshLink: 1 - m_AutoBraking: 1 - m_AutoRepath: 1 - m_Height: 2 - m_BaseOffset: 1 - m_WalkableMask: 4294967295 - m_ObstacleAvoidanceType: 4 ---- !u!114 &1809549212 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} - m_Name: - m_EditorClassIdentifier: - m_BrainParameters: - VectorObservationSize: 4 - NumStackedVectorObservations: 1 - m_ActionSpec: - m_NumContinuousActions: 0 - BranchSizes: 01000000 - VectorActionSize: 01000000 - VectorActionDescriptions: [] - VectorActionSpaceType: 0 - hasUpgradedBrainParametersWithActionSpec: 1 - m_Model: {fileID: 0} - m_InferenceDevice: 3 - m_BehaviorType: 1 - m_BehaviorName: Defender Behaviour - TeamId: 0 - m_UseChildSensors: 0 - m_UseChildActuators: 1 - m_ObservableAttributeHandling: 2 ---- !u!1 
&4528203471164033736 +--- !u!1 &5245491128202443531 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -378,29 +221,82 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 4528203471164033737} + - component: {fileID: 5245491128202443574} + - component: {fileID: 5245491128202443529} + - component: {fileID: 5245491128202443528} m_Layer: 8 - m_Name: FeetTransform + m_Name: Camera m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4528203471164033737 +--- !u!4 &5245491128202443574 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203471164033736} + m_GameObject: {fileID: 5245491128202443531} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -1, z: 0} + m_LocalPosition: {x: 0, y: 0.388, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] - m_Father: {fileID: 4528203470625763689} - m_RootOrder: 1 + m_Father: {fileID: 5245491129196666053} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &4528203471293941514 +--- !u!20 &5245491128202443529 +Camera: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491128202443531} + m_Enabled: 1 + serializedVersion: 2 + m_ClearFlags: 1 + m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} + m_projectionMatrixMode: 1 + m_GateFitMode: 2 + m_FOVAxisMode: 0 + m_SensorSize: {x: 36, y: 24} + m_LensShift: {x: 0, y: 0} + m_FocalLength: 50 + m_NormalizedViewPortRect: + serializedVersion: 2 + x: 0 + y: 0 + width: 1 + height: 1 + near clip plane: 0.1 + far clip plane: 1000 + field of view: 60 + orthographic: 0 + orthographic size: 5 + m_Depth: 0 + m_CullingMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingPath: -1 + m_TargetTexture: {fileID: 0} + m_TargetDisplay: 0 + m_TargetEye: 3 + m_HDR: 1 + m_AllowMSAA: 1 + m_AllowDynamicResolution: 0 + m_ForceIntoRT: 0 + m_OcclusionCulling: 1 + m_StereoConvergence: 10 + m_StereoSeparation: 0.022 +--- !u!81 &5245491128202443528 +AudioListener: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491128202443531} + m_Enabled: 1 +--- !u!1 &5245491129196666052 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -408,7 +304,7 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 4528203471293941515} + - component: {fileID: 5245491129196666053} m_Layer: 8 m_Name: CameraHolder m_TagString: Untagged @@ -416,18 +312,194 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4528203471293941515 +--- !u!4 &5245491129196666053 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203471293941514} + m_GameObject: {fileID: 5245491129196666052} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0.7, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4528203470433968376} - m_Father: {fileID: 4528203470625763689} + - {fileID: 5245491128202443574} + - {fileID: 8510909888689775087} + m_Father: {fileID: 5245491127989480103} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &5245491129603592454 
+GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 5245491129603592455} + m_Layer: 8 + m_Name: FeetTransform + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &5245491129603592455 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491129603592454} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: -1, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 5245491127989480103} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &8510909888198732725 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 8510909888198732726} + - component: {fileID: 8510909888198732713} + - component: {fileID: 8510909888198732712} + - component: {fileID: 8510909888198732727} + m_Layer: 8 + m_Name: WeaponModel + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &8510909888198732726 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 8510909888689775087} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!33 &8510909888198732713 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!23 &8510909888198732712 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!65 &8510909888198732727 +BoxCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!1 &8510909888689775085 +GameObject: + 
m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 8510909888689775087} + - component: {fileID: 8510909888689775086} + m_Layer: 8 + m_Name: Weapon + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &8510909888689775087 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888689775085} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0.332, y: 0.038, z: 0.394} + m_LocalScale: {x: 0.16226998, y: 0.1581135, z: 1} + m_Children: + - {fileID: 8510909888198732726} + m_Father: {fileID: 5245491129196666053} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &8510909888689775086 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888689775085} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 088bf904d7c90a44dbb35c1d47c2692e, type: 3} + m_Name: + m_EditorClassIdentifier: + settings: + SwayAmount: 4 + SwayYInverted: 0 + SwayXInverted: 0 + SwaySmoothing: 0.1 + SwayResetSmoothing: 0.1 + SwayClampX: 8 + SwayClampY: 8 diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta deleted file mode 100644 index 583669d..0000000 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 63fefa6166d5e6c4b96e83cc243f0a18 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity index d32bf5b..41d83a6 100755 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity +++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity @@ -259,7 +259,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &140697607 MeshRenderer: m_ObjectHideFlags: 0 @@ -352,7 +353,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &293522541 MeshRenderer: m_ObjectHideFlags: 0 @@ -1313,7 +1315,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1116745545 MeshRenderer: m_ObjectHideFlags: 0 @@ -1760,7 +1763,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1345085343 MeshRenderer: m_ObjectHideFlags: 0 @@ -1853,7 +1857,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1488699524 MeshRenderer: m_ObjectHideFlags: 0 @@ -2072,7 +2077,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 
1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1663305224 MeshRenderer: m_ObjectHideFlags: 0 @@ -2132,6 +2138,7 @@ GameObject: - component: {fileID: 1858987086} - component: {fileID: 1858987084} - component: {fileID: 1858987089} + - component: {fileID: 1858987090} - component: {fileID: 1858987088} - component: {fileID: 1858987085} m_Layer: 0 @@ -2153,7 +2160,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 81d1d84442a0ba441976abd6fdd22788, type: 3} m_Name: m_EditorClassIdentifier: - CurrentTime: 0 --- !u!114 &1858987085 MonoBehaviour: m_ObjectHideFlags: 0 @@ -2227,6 +2233,26 @@ MonoBehaviour: - {fileID: 1345085342} - {fileID: 1116745544} - {fileID: 2004854094} +--- !u!114 &1858987090 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1858987083} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: b0835d77f48130e4f81c678f710bf87c, type: 3} + m_Name: + m_EditorClassIdentifier: + spawnPointsForDefendersTeam: + - {fileID: 140697606} + spawnPointsForAttackersTeam: + - {fileID: 2004854094} + AIPrefab: {fileID: 2988578997639256874, guid: b016874eb34cc084aa4359f0bbec50e1, + type: 3} + PlayerPrefab: {fileID: 5245491127989480125, guid: 80f6c1c85e5daed4c96c70205ed5503d, + type: 3} --- !u!1 &1871257865 GameObject: m_ObjectHideFlags: 0 @@ -2480,7 +2506,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &2004854095 MeshRenderer: m_ObjectHideFlags: 0 @@ -2698,88 +2725,6 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2114154251} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!114 &5078004101906046130 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} - m_Name: - m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 100 - moveController: {fileID: 5242608118223468128} ---- !u!114 &5242608118223468128 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} - m_Name: - m_EditorClassIdentifier: - navMeshAgent: {fileID: 5242608118223468129} ---- !u!195 &5242608118223468129 -NavMeshAgent: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_AgentTypeID: 0 - m_Radius: 0.5 - m_Speed: 3.5 - m_Acceleration: 8 - avoidancePriority: 50 - m_AngularSpeed: 120 - m_StoppingDistance: 0 - m_AutoTraverseOffMeshLink: 1 - m_AutoBraking: 1 - m_AutoRepath: 1 - m_Height: 2 - m_BaseOffset: 1 - m_WalkableMask: 4294967295 - m_ObstacleAvoidanceType: 4 ---- !u!114 &5242608118223468140 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 
11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} - m_Name: - m_EditorClassIdentifier: - m_BrainParameters: - VectorObservationSize: 4 - NumStackedVectorObservations: 1 - m_ActionSpec: - m_NumContinuousActions: 0 - BranchSizes: 01000000 - VectorActionSize: 01000000 - VectorActionDescriptions: [] - VectorActionSpaceType: 0 - hasUpgradedBrainParametersWithActionSpec: 1 - m_Model: {fileID: 0} - m_InferenceDevice: 0 - m_BehaviorType: 1 - m_BehaviorName: NPC - TeamId: 0 - m_UseChildSensors: 1 - m_UseChildActuators: 1 - m_ObservableAttributeHandling: 0 --- !u!1001 &6818223692685937217 PrefabInstance: m_ObjectHideFlags: 0 @@ -2854,366 +2799,3 @@ PrefabInstance: objectReference: {fileID: 0} m_RemovedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} ---- !u!23 &8333476723876163232 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!1 &8509012040201336570 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012040201336571} - m_Layer: 8 - m_Name: CameraHolder - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8509012040201336571 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040201336570} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.7, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 8509012041069629704} - m_Father: {fileID: 8509012040873181337} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8509012040340093752 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012040340093753} - m_Layer: 8 - m_Name: FeetTransform - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8509012040340093753 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040340093752} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -1, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 8509012040873181337} - m_RootOrder: 1 - 
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8509012040873181315 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012040873181337} - - component: {fileID: 8509012040873181336} - - component: {fileID: 8333476723876163232} - - component: {fileID: 8509012040873181318} - - component: {fileID: 8509012040873181338} - - component: {fileID: 8509012040873181319} - - component: {fileID: 8509012040873181317} - - component: {fileID: 8509012040873181316} - - component: {fileID: 5078004101906046130} - - component: {fileID: 5242608118223468128} - - component: {fileID: 5242608118223468129} - - component: {fileID: 5242608118223468140} - - component: {fileID: 8509012040873181320} - m_Layer: 8 - m_Name: 'Player ' - m_TagString: Defender - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &8509012040873181316 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 0 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} - m_Name: - m_EditorClassIdentifier: - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 8509012040201336571} - feetTransform: {fileID: 8509012040340093753} - playerSettings: - ViewXSensetivity: 20 - ViewYSensetivity: 20 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 10 - RunningStrafeSpeed: 6 - WalkingForwardSpeed: 4 - WalkingBackwardSpeed: 2 - WalkingStrafeSpeed: 3 - JumpingHeight: 6 - JumpingFalloff: 1 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 55 - gravityAmount: 0.05 - gravityMin: -3 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0.2 - playerStandStance: - CameraHeight: 0.7 - StanceCollider: {fileID: 8509012040873181318} - playerCrouchStance: - CameraHeight: 0.3 - StanceCollider: {fileID: 8509012040873181338} - playerProneStance: - CameraHeight: -0.58 - StanceCollider: {fileID: 8509012040873181319} - currentWeapon: {fileID: 0} ---- !u!143 &8509012040873181317 -CharacterController: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Height: 2 - m_Radius: 0.5 - m_SlopeLimit: 45 - m_StepOffset: 0.3 - m_SkinWidth: 0.08 - m_MinMoveDistance: 0.001 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &8509012040873181318 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 2 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &8509012040873181319 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.3 - m_Height: 0.8 - m_Direction: 
1 - m_Center: {x: 0, y: -0.6, z: 0} ---- !u!114 &8509012040873181320 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 3a5c9d521e5ef4759a8246a07d52221e, type: 3} - m_Name: - m_EditorClassIdentifier: - DecisionPeriod: 1 - TakeActionsBetweenDecisions: 0 ---- !u!33 &8509012040873181336 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!4 &8509012040873181337 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_LocalRotation: {x: -0, y: 0.6820348, z: -0, w: 0.7313197} - m_LocalPosition: {x: 40.54, y: 1, z: -15.91} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 8509012040201336571} - - {fileID: 8509012040340093753} - m_Father: {fileID: 0} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 0, y: 86.006004, z: 0} ---- !u!136 &8509012040873181338 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 1.3 - m_Direction: 1 - m_Center: {x: 0, y: -0.35, z: 0} ---- !u!4 &8509012041069629704 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012041069629749} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.734, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 8509012040201336571} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8509012041069629749 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012041069629704} - - component: {fileID: 8509012041069629751} - - component: {fileID: 8509012041069629750} - m_Layer: 8 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!81 &8509012041069629750 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012041069629749} - m_Enabled: 1 ---- !u!20 &8509012041069629751 -Camera: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012041069629749} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - 
-      serializedVersion: 2
-      m_Bits: 4294967295
-  m_RenderingPath: -1
-  m_TargetTexture: {fileID: 0}
-  m_TargetDisplay: 0
-  m_TargetEye: 3
-  m_HDR: 1
-  m_AllowMSAA: 1
-  m_AllowDynamicResolution: 0
-  m_ForceIntoRT: 0
-  m_OcclusionCulling: 1
-  m_StereoConvergence: 10
-  m_StereoSeparation: 0.022
diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset
deleted file mode 100644
index 141ab0ec22d2d373678f8d13b269f8ff74a49593..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 11248
[base85-encoded binary NavMesh data omitted]

diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta
deleted file mode 100644
index 10e1563..0000000
--- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta
+++ /dev/null
@@ -1,8 +0,0 @@
-fileFormatVersion: 2
-guid: 305bb221606d7a748acca94156e2d347
-NativeFormatImporter:
-  externalObjects: {}
-  mainObjectFileID: 23800000
-  userData:
-  assetBundleName:
-  assetBundleVariant:
diff --git a/Assets/Scenes/Greatest_map_ever/NavMesh.asset b/Assets/Scenes/Greatest_map_ever/NavMesh.asset
index 71de95dda0ea4b5bf319181954f14539a991e4eb..d98232af1fbb79cc21a9f7871c6f98cb79807730 100755
GIT binary patch
literal 11248
[base85-encoded binary NavMesh data omitted]

diff --git a/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta b/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta
index b38f8d9..10e1563 100755
--- a/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta
+++ b/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta
@@ -1,5 +1,5 @@
 fileFormatVersion: 2
-guid: 09beff657ef5d1c4eba194a01e121c1a
+guid: 305bb221606d7a748acca94156e2d347
 NativeFormatImporter:
   externalObjects: {}
   mainObjectFileID: 23800000
diff --git a/Assets/Scenes/tyt player.meta b/Assets/Scenes/tyt player.meta
deleted file mode 100755
index 88b6423..0000000
--- a/Assets/Scenes/tyt player.meta
+++ /dev/null
@@ -1,8 +0,0 @@
-fileFormatVersion: 2
-guid: 8f773973aa2a1aa4aab9bec086b2e8c2
-folderAsset: yes
-DefaultImporter:
-  externalObjects: {}
-  userData:
-  assetBundleName:
-  assetBundleVariant:
diff --git a/Assets/Scenes/tyt player/Player.prefab b/Assets/Scenes/tyt player/Player.prefab
deleted file mode 100755
index 7c588a0..0000000
--- a/Assets/Scenes/tyt player/Player.prefab
+++ /dev/null
@@ -1,510 +0,0 @@
-%YAML 1.1
-%TAG !u!
tag:unity3d.com,2011: ---- !u!1 &5245491127989480125 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491127989480103} - - component: {fileID: 5245491127989480102} - - component: {fileID: 5583297852527723678} - - component: {fileID: 5245491127989480120} - - component: {fileID: 5245491127989480100} - - component: {fileID: 5245491127989480121} - - component: {fileID: 5245491127989480123} - - component: {fileID: 5245491127989480122} - - component: {fileID: 8693388664594773644} - m_Layer: 8 - m_Name: Player - m_TagString: Defender - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491127989480103 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} - m_LocalPosition: {x: 28.21, y: 10.9, z: 46.67} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 5245491129196666053} - - {fileID: 5245491129603592455} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} ---- !u!33 &5245491127989480102 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 &5583297852527723678 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!136 &5245491127989480120 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 2 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &5245491127989480100 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 1.3 - m_Direction: 1 - m_Center: {x: 0, y: -0.35, z: 0} ---- !u!136 &5245491127989480121 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: 
{fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.3 - m_Height: 0.8 - m_Direction: 1 - m_Center: {x: 0, y: -0.6, z: 0} ---- !u!143 &5245491127989480123 -CharacterController: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Height: 2 - m_Radius: 0.5 - m_SlopeLimit: 45 - m_StepOffset: 0.3 - m_SkinWidth: 0.08 - m_MinMoveDistance: 0.001 - m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &5245491127989480122 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} - m_Name: - m_EditorClassIdentifier: - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 5245491129196666053} - feetTransform: {fileID: 5245491129603592455} - playerSettings: - ViewXSensetivity: 20 - ViewYSensetivity: 20 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 10 - RunningStrafeSpeed: 6 - WalkingForwardSpeed: 4 - WalkingBackwardSpeed: 2 - WalkingStrafeSpeed: 3 - JumpingHeight: 6 - JumpingFalloff: 1 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 55 - gravityAmount: 0.05 - gravityMin: -3 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0.2 - playerStandStance: - CameraHeight: 0.7 - StanceCollider: {fileID: 5245491127989480120} - playerCrouchStance: - CameraHeight: 0.3 - StanceCollider: {fileID: 5245491127989480100} - playerProneStance: - CameraHeight: -0.58 - StanceCollider: {fileID: 5245491127989480121} - currentWeapon: {fileID: 8510909888689775086} ---- !u!114 &8693388664594773644 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} - m_Name: - m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 0 - LastTimeHit: 0 ---- !u!1 &5245491128202443531 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491128202443574} - - component: {fileID: 5245491128202443529} - - component: {fileID: 5245491128202443528} - m_Layer: 8 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491128202443574 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491128202443531} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.388, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 5245491129196666053} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &5245491128202443529 -Camera: - m_ObjectHideFlags: 0 
- m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491128202443531} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.1 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!81 &5245491128202443528 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491128202443531} - m_Enabled: 1 ---- !u!1 &5245491129196666052 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491129196666053} - m_Layer: 8 - m_Name: CameraHolder - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491129196666053 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491129196666052} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.7, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 5245491128202443574} - - {fileID: 8510909888689775087} - m_Father: {fileID: 5245491127989480103} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &5245491129603592454 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491129603592455} - m_Layer: 8 - m_Name: FeetTransform - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491129603592455 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491129603592454} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -1, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 5245491127989480103} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8510909888198732725 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8510909888198732726} - - component: {fileID: 8510909888198732713} - - component: {fileID: 8510909888198732712} - - component: {fileID: 8510909888198732727} - m_Layer: 8 - m_Name: WeaponModel - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8510909888198732726 -Transform: - m_ObjectHideFlags: 0 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888198732725} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 8510909888689775087} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!33 &8510909888198732713 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888198732725} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 &8510909888198732712 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888198732725} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!65 &8510909888198732727 -BoxCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888198732725} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!1 &8510909888689775085 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8510909888689775087} - - component: {fileID: 8510909888689775086} - m_Layer: 8 - m_Name: Weapon - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8510909888689775087 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888689775085} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.332, y: 0.038, z: 0.394} - m_LocalScale: {x: 0.16226998, y: 0.1581135, z: 1} - m_Children: - - {fileID: 8510909888198732726} - m_Father: {fileID: 5245491129196666053} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &8510909888689775086 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888689775085} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 088bf904d7c90a44dbb35c1d47c2692e, type: 3} - m_Name: - m_EditorClassIdentifier: - settings: - SwayAmount: 4 - SwayYInverted: 0 - SwayXInverted: 0 - SwaySmoothing: 0.1 - SwayResetSmoothing: 0.1 - SwayClampX: 8 - SwayClampY: 8 
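[Editor's note: the deleted Player.prefab above serializes a weapon-sway component (guid 088bf904d7c90a44dbb35c1d47c2692e) with SwayAmount, SwaySmoothing, SwayResetSmoothing, and SwayClampX/Y fields. That script's body is not part of this series, so the following is only a sketch of how such settings are conventionally consumed; the class name and all logic are assumptions:

    using UnityEngine;

    // Hypothetical consumer of the serialized sway settings above.
    public class WeaponSway : MonoBehaviour
    {
        [System.Serializable]
        public class SwaySettings
        {
            public float SwayAmount = 4f;
            public bool SwayYInverted;
            public bool SwayXInverted;
            public float SwaySmoothing = 0.1f;
            public float SwayResetSmoothing = 0.1f;
            public float SwayClampX = 8f;
            public float SwayClampY = 8f;
        }

        public SwaySettings settings;

        private void Update()
        {
            // Rotate the weapon holder away from mouse movement, clamped per axis.
            var mouse = new Vector2(Input.GetAxis("Mouse X"), Input.GetAxis("Mouse Y"));
            float pitch = Mathf.Clamp(mouse.y * settings.SwayAmount, -settings.SwayClampY, settings.SwayClampY)
                          * (settings.SwayYInverted ? 1f : -1f);
            float yaw = Mathf.Clamp(mouse.x * settings.SwayAmount, -settings.SwayClampX, settings.SwayClampX)
                        * (settings.SwayXInverted ? -1f : 1f);

            // Ease toward the sway target while the mouse moves, back to rest when idle.
            var target = mouse == Vector2.zero ? Quaternion.identity : Quaternion.Euler(pitch, yaw, 0f);
            float smoothing = mouse == Vector2.zero ? settings.SwayResetSmoothing : settings.SwaySmoothing;
            transform.localRotation = Quaternion.Slerp(transform.localRotation, target, Time.deltaTime / smoothing);
        }
    }
]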
diff --git a/Assets/Scenes/tyt player/Player.prefab.meta b/Assets/Scenes/tyt player/Player.prefab.meta
deleted file mode 100755
index 1fa6659..0000000
--- a/Assets/Scenes/tyt player/Player.prefab.meta
+++ /dev/null
@@ -1,7 +0,0 @@
-fileFormatVersion: 2
-guid: 80f6c1c85e5daed4c96c70205ed5503d
-PrefabImporter:
-  externalObjects: {}
-  userData:
-  assetBundleName:
-  assetBundleVariant:
diff --git a/Assets/Scripts/Bots/CharacterFactory.cs b/Assets/Scripts/Bots/CharacterFactory.cs
new file mode 100644
index 0000000..0e8ef59
--- /dev/null
+++ b/Assets/Scripts/Bots/CharacterFactory.cs
@@ -0,0 +1,59 @@
+using System.Collections.Generic;
+using UnityEngine;
+using Unity;
+
+public class CharacterFactory : MonoBehaviour
+{
+    private static CharacterFactory instance;
+    public static CharacterFactory Instance { get { return instance; } }
+
+    [SerializeField] private List<NavPoint> spawnPointsForDefendersTeam;
+    [SerializeField] private List<NavPoint> spawnPointsForAttackersTeam;
+    [SerializeField] private GameObject AIPrefab;
+    [SerializeField] private GameObject PlayerPrefab;
+
+    private List<ICharacter> Players;
+
+    private void Awake()
+    {
+        if (instance == null)
+            instance = this;
+        else
+            Destroy(gameObject);
+    }
+
+    private void Start()
+    {
+        var attackersCount = SettingsReader.Instance.GetSettings.numOfAttackers;
+        var defendersCount = SettingsReader.Instance.GetSettings.numOfDefenders;
+        var humanDef = SettingsReader.Instance.GetSettings.hasHumanDefender ? 1 : 0;
+        var humanAtc = SettingsReader.Instance.GetSettings.hasHumanAttacker ? 1 : 0;
+
+        if (humanAtc == 1 && humanDef == 1)
+            throw new System.ArgumentException("There can be only one human player");
+
+        for (int i = 0; i < attackersCount - humanAtc; i++)
+            InstantiateEntity(Team.Attackers, TypeAI.D0DiskAI,
+                spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)]);
+        for (int i = 0; i < defendersCount - humanDef; i++)
+            InstantiateEntity(Team.Defenders, TypeAI.D0DiskAI,
+                spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]);
+        if (humanAtc == 1)
+            InstantiateEntity(Team.Attackers, TypeAI.HumanAI,
+                spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)]);
+        if (humanDef == 1)
+            InstantiateEntity(Team.Defenders, TypeAI.HumanAI,
+                spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]);
+    }
+
+    private void InstantiateEntity(Team team, TypeAI typeAi, NavPoint spawnPoint)
+    {
+        // Spawn the right prefab at the nav point and hand the new object its team.
+        var spawned = Instantiate(
+            typeAi == TypeAI.HumanAI ? PlayerPrefab : AIPrefab,
+            spawnPoint.position,
+            Quaternion.identity);
+
+        var character = spawned.GetComponent<ICharacter>();
+        character.GetCharacter.Team = team;
+    }
+}
\ No newline at end of file
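[Editor's note: CharacterFactory references a TypeAI enum that does not appear anywhere in this patch series; Team is already used by NPC.cs before this patch. For the factory to compile, a definition along these lines is assumed (the two member names come from the call sites above, everything else is a guess):

    // Assumed definition; only HumanAI and D0DiskAI are grounded in the patch.
    public enum TypeAI
    {
        HumanAI,
        D0DiskAI,
    }
]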
diff --git a/Assets/Scripts/Bots/CharacterPooler.cs.meta b/Assets/Scripts/Bots/CharacterFactory.cs.meta
old mode 100755
new mode 100644
similarity index 100%
rename from Assets/Scripts/Bots/CharacterPooler.cs.meta
rename to Assets/Scripts/Bots/CharacterFactory.cs.meta
diff --git a/Assets/Scripts/Bots/CharacterPooler.cs b/Assets/Scripts/Bots/CharacterPooler.cs
deleted file mode 100755
index 1dc495a..0000000
--- a/Assets/Scripts/Bots/CharacterPooler.cs
+++ /dev/null
@@ -1,4 +0,0 @@
-public class CharacterPooler
-{
-
-}
\ No newline at end of file
diff --git a/Assets/Scripts/Character/Character.cs b/Assets/Scripts/Character/Character.cs
new file mode 100644
index 0000000..a229718
--- /dev/null
+++ b/Assets/Scripts/Character/Character.cs
@@ -0,0 +1,22 @@
+using UnityEngine;
+public class Character
+{
+    public Team Team { get; set; }
+    public float LastTimeHit = 0;
+    public CharacterCondition Condition;
+
+    public Character()
+    {
+        Condition = new CharacterCondition();
+    }
+
+    public void ResetCharacter()
+    {
+
+    }
+}
+
+public interface ICharacter
+{
+    Character GetCharacter { get; }
+}
\ No newline at end of file
diff --git a/Assets/Scripts/Character/Character.cs.meta b/Assets/Scripts/Character/Character.cs.meta
new file mode 100644
index 0000000..aec9e7e
--- /dev/null
+++ b/Assets/Scripts/Character/Character.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 44d6a17ad31b31241928e1a17e9aba37
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs
index 3844814..d1ec569 100755
--- a/Assets/Scripts/Character/CharacterCondition.cs
+++ b/Assets/Scripts/Character/CharacterCondition.cs
@@ -1,12 +1,6 @@
 using System;
 using UnityEngine;
 
-public enum NPCState
-{
-    InCover,
-    InBlancPoint,
-    InRunning,
-}
 
 public class CharacterCondition
 {
@@ -54,9 +48,6 @@ public class CharacterCondition
         }
     }
 
-    [HideInInspector]
-    public NPCState npcState { get; private set; }
-
     public CharacterCondition()
     {
         var settings = SettingsReader.Instance.GetSettings;
diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs
index e45bf60..387a805 100755
--- a/Assets/Scripts/Character/MovementController.cs
+++ b/Assets/Scripts/Character/MovementController.cs
@@ -14,27 +14,19 @@ public class MovementController : MonoBehaviour
         navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed;
     }
 
-    public void Move()
-    {
-        var pointCandidate = getPointCandidate();
-        goToNextNavPoint(pointCandidate);
-    }
-
     public void MoveToRandomPoint()
     {
         Debug.Log(MapManager.navPoints == null);
         goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]);
     }
 
-    private NavPoint getPointCandidate()
+    public List<NavPoint> getPointsCandidate()
    {
-        var NavPointsPositions = MapManager.navPoints
-            .Select(point => point.transform.position)
-            .Where(point => (currentPosition.transform.position - point).magnitude <= SettingsReader.Instance.GetSettings.movementSpeed)
+        return MapManager.navPoints
+            .Where(point => (currentPosition.position - point.position).magnitude <= SettingsReader.Instance.GetSettings.movementSpeed)
             .ToList();
-        return null;
    }
 
     public void goToNextNavPoint(NavPoint destination) =>
-        navMeshAgent.SetDestination(destination.transform.position);
+        navMeshAgent.SetDestination(destination.position);
 }
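[Editor's note: getPointsCandidate() now returns every NavPoint within movementSpeed units of the current point and leaves the choice to the caller. Since movementSpeed is in units per second while the filter treats it as a radius in units, the set effectively means "reachable within one second"; scaling by the agent's decision interval would make that explicit. A minimal caller, assuming it sits on the same GameObject as the MovementController (the class and method names here are illustrative):

    using UnityEngine;

    public class RandomReachableMover : MonoBehaviour
    {
        private MovementController moveController;

        private void Start() => moveController = GetComponent<MovementController>();

        public void MoveToReachablePoint()
        {
            var candidates = moveController.getPointsCandidate();
            if (candidates.Count > 0)
                moveController.goToNextNavPoint(candidates[Random.Range(0, candidates.Count)]);
            else
                moveController.MoveToRandomPoint(); // nothing in range: fall back
        }
    }
]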
diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs
index 776c42f..6ff8f6d 100755
--- a/Assets/Scripts/Character/NPC.cs
+++ b/Assets/Scripts/Character/NPC.cs
@@ -3,27 +3,50 @@ using UnityEngine;
 using Unity.MLAgents;
 using Unity.MLAgents.Sensors;
 using Unity.MLAgents.Actuators;
+using System.Collections.Generic;
 
 [RequireComponent(typeof(MovementController))]
-public class NPC : Agent
+public class NPC : Agent, ICharacter
 {
-    public Team Team { get; set; }
-    [HideInInspector]
-    private float LastTimeHit;
+    public Character AgentCharacter;
     public CharacterCondition Condition;
-    public MovementController moveController;
-
+    public NPC_BaseState NPC_State { get; private set; }
+
+    public Character GetCharacter => AgentCharacter;
+
+    private NPC_DirectPointState DirectState;
+    private NPC_InCoverState CoverState;
+    private NPC_RunningState RunningState;
+
+    private MovementController moveController;
+    private BufferSensorComponent bufferSensor;
+
+    private void Awake()
+    {
+        DirectState = new NPC_DirectPointState();
+        CoverState = new NPC_InCoverState();
+        RunningState = new NPC_RunningState();
+        NPC_State = DirectState;
+
+        AgentCharacter = new Character();
+        Condition = AgentCharacter.Condition;
+    }
 
     private void Start()
     {
-        Condition = new CharacterCondition();
+        AgentCharacter = new Character();
+        Condition = AgentCharacter.Condition;
+        moveController = gameObject.GetComponent<MovementController>();
+        bufferSensor = gameObject.GetComponent<BufferSensorComponent>();
+
+        GameManager.OnResetScene += AgentCharacter.ResetCharacter;
    }
 
     public override void OnEpisodeBegin()
    {
 
    }
 
     public override void CollectObservations(VectorSensor sensor)
@@ -31,7 +54,28 @@ public class NPC : Agent
         sensor.AddObservation(Condition.HealthPoints);
         sensor.AddObservation(Condition.ArmourPoints);
         sensor.AddObservation(Condition.Ammunition);
-        sensor.AddObservation((int)Condition.npcState);
+        sensor.AddObservation((int)NPC_State.State);
+
+        var candidates = moveController.getPointsCandidate();
+        foreach (var point in candidates)
+        {
+            bufferSensor.AppendObservation(new float[] {
+                // 1: id of the nav point the agent currently occupies
+                (float)moveController.currentPosition.PointId,
+                // 2: current point's distance to the flag
+                moveController.currentPosition.FlagDistance,
+                // 3: death count recorded at the current point
+                moveController.currentPosition.DeathAttr,
+                // 4: 1 if the candidate is closer to the flag than the agent is
+                GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position) ? 1 : 0,
+                // 5: 1 if an enemy is closer to the agent than the candidate is
+                GameManager.IsCloserToEnemyThanToNextNavPoint(point, transform.position, AgentCharacter.Team) ? 1 : 0
+            });
+        }
    }
 
     public override void Heuristic(in ActionBuffers actionsOut)
@@ -46,16 +90,28 @@ public class NPC : Agent
     public override void OnActionReceived(ActionBuffers actions)
     {
         if (actions.DiscreteActions[0] == 1)
+        {
             moveController.MoveToRandomPoint();
+            NPC_State = RunningState;
+        }
    }
 
     public event Action<NPC> OnKilledEvent;
     public void GetDamage(float damage)
     {
+        AgentCharacter.LastTimeHit = TimeManager.Instance.CurrentTime;
         Condition.GiveHealth(-Mathf.RoundToInt(damage * (1 - Condition.ArmourPoints * 0.5f)));
         Condition.GiveArmour(-Mathf.RoundToInt(Mathf.Sqrt(damage) * 5));
 
         if (Condition.HealthPoints < 0)
+        {
             OnKilledEvent?.Invoke(this);
+            moveController.currentPosition.DeathAttr += 1;
+        }
    }
+
+    private void OnDestroy()
+    {
+        Debug.LogWarning("Pooled object was destroyed");
+    }
 }
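[Editor's note: in CollectObservations above, entries 1-3 of every buffer row read from moveController.currentPosition, so each candidate row repeats the current point's id, flag distance, and death count; only entries 4-5 actually vary with the candidate. If each row is meant to describe its candidate point, the presumably intended body of the loop would be (same APIs as the patch, only the receiver changes):

    // Sketch: describe the candidate point rather than the current one.
    bufferSensor.AppendObservation(new float[] {
        (float)point.PointId,   // candidate's id
        point.FlagDistance,     // candidate's distance to the flag
        point.DeathAttr,        // deaths recorded at the candidate
        GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position) ? 1f : 0f,
        GameManager.IsCloserToEnemyThanToNextNavPoint(point, transform.position, AgentCharacter.Team) ? 1f : 0f
    });
]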
diff --git a/Assets/Scripts/Character/NPC_State.cs b/Assets/Scripts/Character/NPC_State.cs
new file mode 100644
index 0000000..cc2802c
--- /dev/null
+++ b/Assets/Scripts/Character/NPC_State.cs
@@ -0,0 +1,46 @@
+public enum NPC_EnumState
+{
+    InCover,
+    InDirectPoint,
+    InRunning,
+}
+
+public interface NPC_BaseState
+{
+    NPC_EnumState State { get; }
+    bool InCover { get; }
+    bool IsRunning { get; }
+    bool InDirectPoint { get; }
+    float HitChance { get; }
+    float DoDamageChance { get; }
+}
+
+public class NPC_DirectPointState : NPC_BaseState
+{
+    public bool InCover => false;
+    public bool IsRunning => false;
+    public bool InDirectPoint => true;
+    public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInDirectPoint;
+    public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInDirectPoint;
+    public NPC_EnumState State => NPC_EnumState.InDirectPoint;
+}
+
+public class NPC_RunningState : NPC_BaseState
+{
+    public bool InCover => false;
+    public bool IsRunning => true;
+    public bool InDirectPoint => false;
+    public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInRunning;
+    public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInRunning;
+    public NPC_EnumState State => NPC_EnumState.InRunning;
+}
+
+public class NPC_InCoverState : NPC_BaseState
+{
+    public bool InCover => true;
+    public bool IsRunning => false;
+    public bool InDirectPoint => false;
+    public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInCover;
+    public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInCover;
+    public NPC_EnumState State => NPC_EnumState.InCover;
+}
diff --git a/Assets/Scripts/Character/NPC_State.cs.meta b/Assets/Scripts/Character/NPC_State.cs.meta
new file mode 100644
index 0000000..c1483bf
--- /dev/null
+++ b/Assets/Scripts/Character/NPC_State.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: a192e433e26797745ad0b46de2586de3
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Assets/Scripts/Character/Player.cs b/Assets/Scripts/Character/Player.cs
new file mode 100644
index 0000000..53ddb64
--- /dev/null
+++ b/Assets/Scripts/Character/Player.cs
@@ -0,0 +1,34 @@
+using System;
+using UnityEngine;
+
+public class Player : MonoBehaviour, ICharacter
+{
+    [HideInInspector]
+    public Character PlayerCharacter;
+    public CharacterCondition Condition;
+
+    public Character GetCharacter => PlayerCharacter;
+
+    private void Start()
+    {
+        PlayerCharacter = new Character();
+        Condition = PlayerCharacter.Condition;
+        GameManager.OnResetScene += PlayerCharacter.ResetCharacter;
+    }
+
+    public event Action<Player> OnKilledEvent;
+    public void GetDamage(float damage)
+    {
+        PlayerCharacter.LastTimeHit = TimeManager.Instance.CurrentTime;
+        Condition.GiveHealth(-Mathf.RoundToInt(damage * (1 - Condition.ArmourPoints * 0.5f)));
+        Condition.GiveArmour(-Mathf.RoundToInt(Mathf.Sqrt(damage) * 5));
+
+        if (Condition.HealthPoints < 0)
+            OnKilledEvent?.Invoke(this);
+    }
+
+    private void OnDestroy()
+    {
+        Debug.LogWarning("Pooled object was destroyed");
+    }
+}
diff --git a/Assets/Scripts/Character/Player.cs.meta b/Assets/Scripts/Character/Player.cs.meta
new file mode 100644
index 0000000..63af2cb
--- /dev/null
+++ b/Assets/Scripts/Character/Player.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: a8c9a8e604d395c4ab9d03d28adc4982
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
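[Editor's note: nothing in this patch consumes HitChance or DoDamageChance yet; the Settings fields they read are added further down. A hedged sketch of how a shot might be resolved against the state-dependent chances (NPC.NPC_State and GetDamage exist above; the resolver itself is illustrative):

    using UnityEngine;

    public static class CombatResolver
    {
        // One shot: the attacker's state gates whether damage is dealt,
        // the target's state gates whether the hit lands.
        public static void ResolveShot(NPC attacker, NPC target, float damage)
        {
            if (Random.value > attacker.NPC_State.DoDamageChance)
                return; // e.g. attacker is running and misses
            if (Random.value > target.NPC_State.HitChance)
                return; // e.g. target is in cover and is not hit
            target.GetDamage(damage);
        }
    }
]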
diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs
index 613ad1e..bdbc7bc 100755
--- a/Assets/Scripts/Managers/GameManager.cs
+++ b/Assets/Scripts/Managers/GameManager.cs
@@ -1,14 +1,14 @@
-using System.Collections;
-using System.Collections.Generic;
-using UnityEditorInternal;
+using Unity.MLAgents;
 using UnityEngine;
 
 public class GameManager : MonoBehaviour
 {
-
     private static GameManager instance;
     public static GameManager Instance { get { return instance; } }
 
+    private static SimpleMultiAgentGroup DefendersTeam = new SimpleMultiAgentGroup();
+    private static SimpleMultiAgentGroup AttackersTeam = new SimpleMultiAgentGroup();
+
     private void Awake()
     {
         if (Instance == null)
@@ -21,13 +21,36 @@ public class GameManager : MonoBehaviour
     {
         GlobalEventManager.onCaptureFlag += flagCaptured;
         GlobalEventManager.onTimeLeft += timeOut;
+
+        var agents = GameObject.FindObjectsOfType<Agent>();
+        foreach (var item in agents)
+        {
+            // Assumes every Agent in the scene is an NPC.
+            var agent = item as NPC;
+            if (agent.GetCharacter.Team == Team.Attackers)
+                AttackersTeam.RegisterAgent(agent);
+            else
+                DefendersTeam.RegisterAgent(agent);
+        }
    }
 
-    private void Update()
+    public static bool IsCloserToEnemyThanToNextNavPoint(NavPoint navPoint, Vector3 currentTransform, Team team)
    {
-
+        // Look at the opposing team's agents; callers pass their own team.
+        SimpleMultiAgentGroup enemyGroup;
+        if (team == Team.Attackers)
+            enemyGroup = DefendersTeam;
+        else
+            enemyGroup = AttackersTeam;
+
+        var distToNavPoint = (currentTransform - navPoint.position).magnitude;
+        foreach (var agent in enemyGroup.GetRegisteredAgents())
+            if (distToNavPoint > (currentTransform - agent.transform.position).magnitude)
+                return true;
+        return false;
    }
 
+    public static bool IsCloserToFlagFromNextNavPoint(NavPoint navPoint, Vector3 currentTransform)
+        => navPoint.FlagDistance < (currentTransform - GameObject.FindGameObjectWithTag("Flag").transform.position).magnitude;
+
     private void flagCaptured(Team team)
     {
         switch(team)
diff --git a/Assets/Scripts/Managers/MapManager.cs b/Assets/Scripts/Managers/MapManager.cs
index 4a9451b..8c64208 100755
--- a/Assets/Scripts/Managers/MapManager.cs
+++ b/Assets/Scripts/Managers/MapManager.cs
@@ -6,12 +6,14 @@ public class MapManager : MonoBehaviour
     public static List<NavPoint> navPoints { get; private set; }
     private void Start()
     {
+        var i = 0;
         navPoints = new List<NavPoint>();
         var navPointsGameObj = GameObject.FindGameObjectsWithTag("Point");
         foreach (var gameobj in navPointsGameObj)
         {
-            Debug.Log(" a ");
-            navPoints.Add(gameobj.GetComponent<NavPoint>());
+            var navpoint = gameobj.GetComponent<NavPoint>();
+            navpoint.PointId = i++; // assign sequential ids as points are discovered
+            navPoints.Add(navpoint);
        }
    }
 }
diff --git a/Assets/Scripts/Managers/TimeManager.cs b/Assets/Scripts/Managers/TimeManager.cs
index afdbad8..ea7fec4 100755
--- a/Assets/Scripts/Managers/TimeManager.cs
+++ b/Assets/Scripts/Managers/TimeManager.cs
@@ -4,8 +4,10 @@ using UnityEngine;
 
 public class TimeManager : MonoBehaviour
 {
-    public static TimeManager instance = null;
-    public float CurrentTime;
+    private static TimeManager instance;
+    public static TimeManager Instance { get { return instance; } }
+
+    public float CurrentTime { get; private set; }
     void Start()
     {
         if (instance == null)
diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs
index 75066c0..2b43e84 100755
--- a/Assets/Scripts/Misc/NavPoint.cs
+++ b/Assets/Scripts/Misc/NavPoint.cs
@@ -6,22 +6,15 @@ public class NavPoint : MonoBehaviour
 {
     public Vector3 position => gameObject.transform.position;
     public float FlagDistance { get; private set; }
-    [System.NonSerialized] public float DeathAttr;
-    [System.NonSerialized] public List<GameObject> EnemiesSeen;
+
+    [HideInInspector]
+    public int? PointId;
+    public float DeathAttr = 0;
+    public List<GameObject> EnemiesSeen = new List<GameObject>();
     //Here other attributes;
 
-    [SerializeField]
-    public int PointId;
-
-    private void Awake()
-    {
-        //DO NOT DELETE
-    }
-
     private void Start()
     {
-        FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude;
-        EnemiesSeen = new List<GameObject>();
-        DeathAttr = 0;
+        FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude;
    }
 }
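[Editor's note: IsCloserToFlagFromNextNavPoint and NavPoint.Start each call GameObject.FindGameObjectWithTag("Flag"), which scans the scene on every call. Assuming a single flag object that lives for the whole match, a small cache removes the repeated lookups (the class name is illustrative):

    using UnityEngine;

    public static class FlagLocator
    {
        private static Transform flag;

        public static Vector3 Position
        {
            get
            {
                // Resolve the tag once, then reuse the cached transform.
                if (flag == null)
                    flag = GameObject.FindGameObjectWithTag("Flag").transform;
                return flag.position;
            }
        }
    }

GameManager and NavPoint could then compare against FlagLocator.Position instead of re-querying the tag.]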
diff --git a/Assets/Scripts/Misc/Settings.cs b/Assets/Scripts/Misc/Settings.cs
index a1e07c3..e01abc1 100755
--- a/Assets/Scripts/Misc/Settings.cs
+++ b/Assets/Scripts/Misc/Settings.cs
@@ -27,4 +27,11 @@ public class Settings : ScriptableObject
     public int maxHealth;
     public int maxArmour;
     public int maxAmmo;
+
+    public float GetHitChanceInDirectPoint;
+    public float GetHitChanceInRunning;
+    public float GetHitChanceInCover;
+    public float DoDamageChanceInDirectPoint;
+    public float DoDamageChanceInRunning;
+    public float DoDamageChanceInCover;
 }
diff --git a/Assets/Scripts/Pickups/PickUpSpawner.cs b/Assets/Scripts/Pickups/PickUpSpawner.cs
index 8de3d31..945c1d3 100755
--- a/Assets/Scripts/Pickups/PickUpSpawner.cs
+++ b/Assets/Scripts/Pickups/PickUpSpawner.cs
@@ -16,6 +16,14 @@ public class PickUpSpawner : MonoBehaviour
     [SerializeField]
     private List<Transform> spawnPoints;
 
+    private void Awake()
+    {
+        if (instance == null)
+            instance = this;
+        else
+            Destroy(gameObject);
+    }
+
     private void Start()
     {
         pickups = new List<GameObject>();
diff --git a/Assets/Settings/Game Settings.asset b/Assets/Settings/Game Settings.asset
index 3652c35..f1a62c0 100755
--- a/Assets/Settings/Game Settings.asset
+++ b/Assets/Settings/Game Settings.asset
@@ -19,9 +19,9 @@ MonoBehaviour:
   movementSpeed: 3
   defTeamAI: 0
   atcTeamAI: 0
-  numOfDefenders: 0
-  numOfAttackers: 0
-  hasHumanDefender: 0
+  numOfDefenders: 1
+  numOfAttackers: 1
+  hasHumanDefender: 1
   hasHumanAttacker: 0
   healthPickupAmount: 50
   armourPickupAmount: 50
@@ -30,3 +30,9 @@ MonoBehaviour:
   maxHealth: 0
   maxArmour: 0
   maxAmmo: 0
+  GetHitChanceInDirectPoint: 0
+  GetHitChanceInRunning: 0
+  GetHitChanceInCover: 0
+  DoDamageChanceInDirectPoint: 0
+  DoDamageChanceInRunning: 0
+  DoDamageChanceInCover: 0
diff --git a/Docs/api.md b/Docs/api.md
new file mode 100644
index 0000000..e69de29
diff --git a/ProjectSettings/ProjectSettings.asset b/ProjectSettings/ProjectSettings.asset
index a8f35bf..dbf6f6c 100644
--- a/ProjectSettings/ProjectSettings.asset
+++ b/ProjectSettings/ProjectSettings.asset
@@ -627,7 +627,8 @@ PlayerSettings:
   gcIncremental: 0
   assemblyVersionValidation: 1
   gcWBarrierValidation: 0
-  apiCompatibilityLevelPerPlatform: {}
+  apiCompatibilityLevelPerPlatform:
+    Standalone: 3
   m_RenderingPath: 1
   m_MobileRenderingPath: 1
   metroPackageName: Template_3D
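[Editor's note: GameManager, CharacterFactory, and PickUpSpawner now each repeat the same Awake-time null-check singleton. A generic base class would express the pattern once; a sketch (this base class is not part of the patch):

    using UnityEngine;

    public abstract class MonoSingleton<T> : MonoBehaviour where T : MonoSingleton<T>
    {
        private static T instance;
        public static T Instance { get { return instance; } }

        protected virtual void Awake()
        {
            // Keep the first instance, discard duplicates.
            if (instance == null)
                instance = (T)this;
            else
                Destroy(gameObject);
        }
    }

A consumer would then declare, e.g., public class PickUpSpawner : MonoSingleton<PickUpSpawner> and drop its own Awake.]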
Assets/Scripts/Pickups/AmmoPickUp.cs | 2 +- Assets/Scripts/Pickups/ArmourPickUp.cs | 2 +- Assets/Scripts/Pickups/HealthPickUp.cs | 2 +- 12 files changed, 88 insertions(+), 31 deletions(-) diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json index 3ed5e46..9a97eb3 100644 --- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json +++ b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json @@ -1 +1 @@ -{"count":1,"self":16.677081599999998,"total":16.6850819,"children":{"InitializeActuators":{"count":1,"self":0.0020011,"total":0.0020011,"children":null},"InitializeSensors":{"count":1,"self":0.0019998,"total":0.0019998,"children":null},"AgentSendState":{"count":1,"self":0.0020001,"total":0.0030001,"children":{"CollectObservations":{"count":1,"self":0.001,"total":0.001,"children":null}}}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650121997","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-BipJTP5Uk-fN-2Lss2TUJ -hubSessionId a1fe6f50-bcbd-11ec-b648-df8ff5c97a4c -accessToken K_G64_ace_5LgpX3riGrgGS92ESag4RH04Ti1lswMX8009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650122013"}} \ No newline at end of file +{"count":1,"self":16.593504,"total":17.166309,"children":{"InitializeActuators":{"count":2,"self":0.0019996,"total":0.0019996,"children":null},"InitializeSensors":{"count":2,"self":0.0030004,"total":0.0030004,"children":null},"AgentSendState":{"count":612,"self":0.016998,"total":0.549809,"children":{"CollectObservations":{"count":1224,"self":0.0230042,"total":0.0230042,"children":null},"WriteActionMask":{"count":1224,"self":0.0060088,"total":0.0060088,"children":null},"RequestDecision":{"count":1224,"self":0.503798,"total":0.503798,"children":null}}},"DecideAction":{"count":612,"self":0.0089991,"total":0.0089991,"children":null},"AgentAct":{"count":612,"self":0.006997,"total":0.006997,"children":null}},"gauges":{"My Behavior.CumulativeReward":{"count":14,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650253950","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-1IWpvtxiu_rvPpHhMWpzt -hubSessionId b1d8b690-be9f-11ec-92bc-6fd1276b6775 -accessToken D1AF5mitRE4Vh3s6p7rpmGuqcqvTsZJKmoGtSNfyNNs009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650253967"}} \ No newline at end of file diff --git a/Assets/Prefabs/Bot.prefab b/Assets/Prefabs/Bot.prefab index c15183f..eedbf35 100755 --- a/Assets/Prefabs/Bot.prefab +++ b/Assets/Prefabs/Bot.prefab @@ -17,6 +17,7 @@ GameObject: - component: {fileID: 2676446634235362783} - component: {fileID: 8656710265340117963} - component: {fileID: 778652956973742106} + - component: {fileID: 1208561866453126566} m_Layer: 0 m_Name: Bot m_TagString: Untagged @@ -115,7 +116,6 @@ MonoBehaviour: maxStep: 0 hasUpgradedFromAgentParameters: 1 MaxStep: 100 - moveController: {fileID: 778652956973742106} --- 
!u!114 &7805954453358028498 MonoBehaviour: m_ObjectHideFlags: 0 @@ -195,3 +195,18 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: navMeshAgent: {fileID: 8656710265340117963} +--- !u!114 &1208561866453126566 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2988578997639256874} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: dd8012d5925524537b27131fef517017, type: 3} + m_Name: + m_EditorClassIdentifier: + m_SensorName: BufferSensor + m_ObservableSize: 5 + m_MaxNumObservables: 512 diff --git a/Assets/Scripts/Bots/CharacterFactory.cs b/Assets/Scripts/Bots/CharacterFactory.cs index 0e8ef59..e2c8be3 100644 --- a/Assets/Scripts/Bots/CharacterFactory.cs +++ b/Assets/Scripts/Bots/CharacterFactory.cs @@ -12,7 +12,8 @@ public class CharacterFactory : MonoBehaviour [SerializeField] private GameObject AIPrefab; [SerializeField] private GameObject PlayerPrefab; - private List Players; + private List Bots = new List(); + private GameObject Player; private void Awake() { @@ -44,6 +45,8 @@ public class CharacterFactory : MonoBehaviour if (humanDef == 1) InstanciateEntity(Team.Defenders, TypeAI.HumanAI, spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]); + + GameManager.OnResetScene += ResetCharacters; } private void InstanciateEntity(Team team, TypeAI typeAi, NavPoint spawnPoint) @@ -52,8 +55,40 @@ public class CharacterFactory : MonoBehaviour typeAi == TypeAI.HumanAI ? PlayerPrefab : AIPrefab, spawnPoint.position, Quaternion.identity); - - var character = gameObject.GetComponent(); - character.GetCharacter.Team = team; + gameobject.SetActive(true); + + if (typeAi == TypeAI.HumanAI) + { + gameobject.GetComponent().GetCharacter.Team = team; + Player = gameobject; + } + else + { + gameobject.GetComponent().GetCharacter.Team = team; + gameobject.GetComponent().currentPosition = spawnPoint; + Bots.Add(gameobject); + } + } + + private void ResetCharacters() + { + foreach (var bot in Bots) + { + var npc = bot.GetComponent(); + npc.ResetCharacter(); + if (npc.GetCharacter.Team == Team.Attackers) + bot.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].position; + else + bot.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].position; + } + var player = Player.GetComponent(); + if (player != null) + { + player.ResetCharacter(); + if (player.GetCharacter.Team == Team.Attackers) + Player.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].position; + else + Player.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].position; + } } } \ No newline at end of file diff --git a/Assets/Scripts/Character/Character.cs b/Assets/Scripts/Character/Character.cs index a229718..50e7313 100644 --- a/Assets/Scripts/Character/Character.cs +++ b/Assets/Scripts/Character/Character.cs @@ -7,13 +7,9 @@ public class Character public Character() { + Debug.Log("init"); Condition = new CharacterCondition(); } - - public void ResetCharacter() - { - - } } public interface ICharacter diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs index 387a805..20e1026 100644 --- a/Assets/Scripts/Character/MovementController.cs +++ b/Assets/Scripts/Character/MovementController.cs @@ -6,12 +6,12 @@ using UnityEngine.AI; 
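Editor's note: the Bot.prefab hunk above adds a BufferSensorComponent with m_ObservableSize: 5 and m_MaxNumObservables: 512. The sketch below is not part of the patch; it shows how an Agent typically feeds such a sensor, assuming the ML-Agents 2.0.1 API, and the class name is illustrative.

using Unity.MLAgents;
using Unity.MLAgents.Sensors;

public class BufferSensorSketch : Agent
{
    private BufferSensorComponent bufferSensor;

    public override void Initialize()
    {
        // The component lives on the same GameObject as the Agent, as in Bot.prefab.
        bufferSensor = GetComponent<BufferSensorComponent>();
    }

    public override void CollectObservations(VectorSensor sensor)
    {
        // Every AppendObservation call must pass exactly ObservableSize (5) floats.
        // A variable number of entries, up to MaxNumObservables, may be appended
        // per step; the sensor pads or masks the rest for the attention module.
        bufferSensor.AppendObservation(new float[] { 1f, 0.5f, 0f, 1f, 0f });
    }
}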
[RequireComponent(typeof(NavMeshAgent))] public class MovementController : MonoBehaviour { - public NavPoint currentPosition { get; private set; } + public NavPoint currentPosition { get; set; } [SerializeField] private NavMeshAgent navMeshAgent; private void Start() { - navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed; + navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed; } public void MoveToRandomPoint() @@ -23,7 +23,7 @@ public class MovementController : MonoBehaviour public List getPointsCandidate() { return MapManager.navPoints - .Where(point => (currentPosition.position - point.position).magnitude <= SettingsReader.Instance.GetSettings.movementSpeed) + .Where(point => (currentPosition.position - point.position).magnitude < SettingsReader.Instance.GetSettings.movementSpeed) .ToList(); } diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index 6ff8f6d..1069d42 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -32,21 +32,21 @@ public class NPC : Agent, ICharacter AgentCharacter = new Character(); Condition = AgentCharacter.Condition; - } - private void Start() - { - AgentCharacter = new Character(); - Condition = AgentCharacter.Condition; moveController = gameObject.GetComponent(); bufferSensor = gameObject.GetComponent(); + } + - GameManager.OnResetScene += AgentCharacter.ResetCharacter; + public void ResetCharacter() + { + Condition = new CharacterCondition(); + EndEpisode(); } public override void OnEpisodeBegin() - { - + { + NPC_State = DirectState; } public override void CollectObservations(VectorSensor sensor) @@ -56,12 +56,9 @@ public class NPC : Agent, ICharacter sensor.AddObservation(Condition.Ammunition); sensor.AddObservation((int)NPC_State.State); - - var candidates = moveController.getPointsCandidate(); foreach (var point in candidates) { - bufferSensor.AppendObservation(new float[] { //1 position in navpointId (float)moveController.currentPosition.PointId, diff --git a/Assets/Scripts/Character/NPC.cs.meta b/Assets/Scripts/Character/NPC.cs.meta index 2ef6c68..232a1c0 100755 --- a/Assets/Scripts/Character/NPC.cs.meta +++ b/Assets/Scripts/Character/NPC.cs.meta @@ -4,7 +4,7 @@ MonoImporter: externalObjects: {} serializedVersion: 2 defaultReferences: [] - executionOrder: 0 + executionOrder: 200 icon: {instanceID: 0} userData: assetBundleName: diff --git a/Assets/Scripts/Character/Player.cs b/Assets/Scripts/Character/Player.cs index 53ddb64..e593f51 100644 --- a/Assets/Scripts/Character/Player.cs +++ b/Assets/Scripts/Character/Player.cs @@ -9,11 +9,15 @@ public class Player : MonoBehaviour, ICharacter public Character GetCharacter => PlayerCharacter; - private void Start() + private void Awake() { PlayerCharacter = new Character(); Condition = PlayerCharacter.Condition; - GameManager.OnResetScene += PlayerCharacter.ResetCharacter; + } + + public void ResetCharacter() + { + Condition = new CharacterCondition(); } public event Action OnKilledEvent; diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs index bdbc7bc..54d1e1d 100755 --- a/Assets/Scripts/Managers/GameManager.cs +++ b/Assets/Scripts/Managers/GameManager.cs @@ -1,5 +1,6 @@ using Unity.MLAgents; using UnityEngine; +using System; public class GameManager : MonoBehaviour { @@ -19,6 +20,8 @@ public class GameManager : MonoBehaviour private void Start() { + Academy.Instance.OnEnvironmentReset += ResetScene; + GlobalEventManager.onCaptureFlag += flagCaptured; 
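Editor's note: the Start() hunk here subscribes GameManager to Academy.Instance.OnEnvironmentReset and, later in this patch, re-broadcasts the call through a static OnResetScene event. Below is a minimal sketch of that wiring, not part of the patch, with an unsubscription step added on the assumption that the manager can be destroyed while the Academy singleton lives on.

using System;
using Unity.MLAgents;
using UnityEngine;

public class ResetRelaySketch : MonoBehaviour
{
    // Mirrors GameManager.OnResetScene from the patch.
    public static event Action OnResetScene;

    private void Start()
    {
        // Raised whenever the training process resets the environment.
        Academy.Instance.OnEnvironmentReset += RaiseResetScene;
    }

    private void OnDestroy()
    {
        // The Academy outlives scene objects, so drop the handler to avoid a
        // dangling delegate; the IsInitialized check keeps teardown from
        // lazily re-creating the Academy.
        if (Academy.IsInitialized)
            Academy.Instance.OnEnvironmentReset -= RaiseResetScene;
    }

    private void RaiseResetScene() => OnResetScene?.Invoke();
}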
GlobalEventManager.onTimeLeft += timeOut; @@ -77,4 +80,11 @@ public class GameManager : MonoBehaviour GlobalEventManager.onCaptureFlag -= flagCaptured; GlobalEventManager.onTimeLeft -= timeOut; } + + public static event Action OnResetScene; + private void ResetScene() + { + Debug.Log("Scene Reset"); + OnResetScene?.Invoke(); + } } diff --git a/Assets/Scripts/Pickups/AmmoPickUp.cs b/Assets/Scripts/Pickups/AmmoPickUp.cs index dffb264..506598d 100755 --- a/Assets/Scripts/Pickups/AmmoPickUp.cs +++ b/Assets/Scripts/Pickups/AmmoPickUp.cs @@ -13,7 +13,7 @@ public class AmmoPickUp : MonoBehaviour, IPickable public void PickObject(GameObject obj) { - obj.GetComponent()?.TakeAmmo(SettingsReader.Instance.GetSettings.ammunitionPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.TakeAmmo(SettingsReader.Instance.GetSettings.ammunitionPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/ArmourPickUp.cs b/Assets/Scripts/Pickups/ArmourPickUp.cs index 075896c..86be289 100755 --- a/Assets/Scripts/Pickups/ArmourPickUp.cs +++ b/Assets/Scripts/Pickups/ArmourPickUp.cs @@ -13,7 +13,7 @@ public class ArmourPickUp : MonoBehaviour, IPickable public void PickObject(GameObject obj) { - obj.GetComponent()?.GiveArmour(SettingsReader.Instance.GetSettings.armourPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.GiveArmour(SettingsReader.Instance.GetSettings.armourPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/HealthPickUp.cs b/Assets/Scripts/Pickups/HealthPickUp.cs index a3fffb4..f92345a 100755 --- a/Assets/Scripts/Pickups/HealthPickUp.cs +++ b/Assets/Scripts/Pickups/HealthPickUp.cs @@ -13,7 +13,7 @@ public class HealthPickUp : MonoBehaviour, IPickable public void PickObject(GameObject obj) { - obj.GetComponent()?.GiveHealth(SettingsReader.Instance.GetSettings.healthPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.GiveHealth(SettingsReader.Instance.GetSettings.healthPickupAmount); gameObject.SetActive(false); } } From 4bd1e0644a046884ff69c702b39a0ff37b8c223a Mon Sep 17 00:00:00 2001 From: Andrey Gumirov Date: Mon, 18 Apr 2022 19:01:06 +0700 Subject: [PATCH 04/27] latest --- .../Timers/Greatest_map_ever_timers.json | 2 +- Assets/Prefabs/Bot.prefab | 10 ++++----- .../Greatest_map_ever/Greatest_map_ever.unity | 13 ++++++++--- .../Scripts/Character/CharacterCondition.cs | 1 + .../Scripts/Character/MovementController.cs | 19 ++++++++++++++-- Assets/Scripts/Character/NPC.cs | 22 +++++++++++++------ Assets/Scripts/Misc/NavPoint.cs | 3 ++- Assets/Settings/Game Settings.asset | 6 ++--- 8 files changed, 54 insertions(+), 22 deletions(-) mode change 100755 => 100644 Assets/Prefabs/Bot.prefab mode change 100755 => 100644 Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity mode change 100755 => 100644 Assets/Settings/Game Settings.asset diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json index 9a97eb3..ad80b5f 100644 --- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json +++ b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json @@ -1 +1 @@ 
-{"count":1,"self":16.593504,"total":17.166309,"children":{"InitializeActuators":{"count":2,"self":0.0019996,"total":0.0019996,"children":null},"InitializeSensors":{"count":2,"self":0.0030004,"total":0.0030004,"children":null},"AgentSendState":{"count":612,"self":0.016998,"total":0.549809,"children":{"CollectObservations":{"count":1224,"self":0.0230042,"total":0.0230042,"children":null},"WriteActionMask":{"count":1224,"self":0.0060088,"total":0.0060088,"children":null},"RequestDecision":{"count":1224,"self":0.503798,"total":0.503798,"children":null}}},"DecideAction":{"count":612,"self":0.0089991,"total":0.0089991,"children":null},"AgentAct":{"count":612,"self":0.006997,"total":0.006997,"children":null}},"gauges":{"My Behavior.CumulativeReward":{"count":14,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650253950","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-1IWpvtxiu_rvPpHhMWpzt -hubSessionId b1d8b690-be9f-11ec-92bc-6fd1276b6775 -accessToken D1AF5mitRE4Vh3s6p7rpmGuqcqvTsZJKmoGtSNfyNNs009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650253967"}} \ No newline at end of file +{"count":1,"self":16.7881248,"total":28.869280999999997,"children":{"InitializeActuators":{"count":1,"self":0.0062039999999999994,"total":0.0062039999999999994,"children":null},"InitializeSensors":{"count":1,"self":0.009415,"total":0.009415,"children":null},"AgentInfo.ToProto":{"count":1,"self":0.0037809999999999996,"total":0.023178,"children":{"GenerateSensorData":{"count":1,"self":0.019396999999999998,"total":0.019396999999999998,"children":null}}},"AgentSendState":{"count":1094,"self":0.070126,"total":2.03434,"children":{"CollectObservations":{"count":1094,"self":1.694399,"total":1.694399,"children":null},"WriteActionMask":{"count":1094,"self":0.01526,"total":0.01526,"children":null},"RequestDecision":{"count":1094,"self":0.046751999999999995,"total":0.254555,"children":{"AgentInfo.ToProto":{"count":1094,"self":0.027583999999999997,"total":0.207803,"children":{"GenerateSensorData":{"count":1094,"self":0.180219,"total":0.180219,"children":null}}}}}}},"DecideAction":{"count":1094,"self":9.9110736,"total":9.911074,"children":null},"AgentAct":{"count":1094,"self":0.088208,"total":0.090377,"children":{"AgentInfo.ToProto":{"count":10,"self":0.00033,"total":0.002169,"children":{"GenerateSensorData":{"count":10,"self":0.001839,"total":0.001839,"children":null}}}}}},"gauges":{"npc.CumulativeReward":{"count":11,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650283028","unity_version":"2019.4.36f1","command_line_arguments":"\/Applications\/Unity\/Hub\/Editor\/2019.4.36f1\/Unity.app\/Contents\/MacOS\/Unity -projectpath \/Users\/gav\/tmp\/projects\/real-shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-gav -hubSessionId 3b1d3b50-bef1-11ec-a2f1-7fead03718a2 -accessToken r8bZAeodsmy_r-5SjXH5Fv1-VHYleg9acf27P9OS5KU009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650283056"}} \ No newline at end of file diff --git a/Assets/Prefabs/Bot.prefab 
b/Assets/Prefabs/Bot.prefab old mode 100755 new mode 100644 index eedbf35..286833e --- a/Assets/Prefabs/Bot.prefab +++ b/Assets/Prefabs/Bot.prefab @@ -143,19 +143,19 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: m_BrainParameters: - VectorObservationSize: 1 + VectorObservationSize: 4 NumStackedVectorObservations: 1 m_ActionSpec: m_NumContinuousActions: 0 - BranchSizes: 01000000 - VectorActionSize: 01000000 + BranchSizes: 0200000002000000 + VectorActionSize: 0200000002000000 VectorActionDescriptions: [] VectorActionSpaceType: 0 hasUpgradedBrainParametersWithActionSpec: 1 m_Model: {fileID: 0} m_InferenceDevice: 0 m_BehaviorType: 0 - m_BehaviorName: My Behavior + m_BehaviorName: npc TeamId: 0 m_UseChildSensors: 1 m_UseChildActuators: 1 @@ -209,4 +209,4 @@ MonoBehaviour: m_EditorClassIdentifier: m_SensorName: BufferSensor m_ObservableSize: 5 - m_MaxNumObservables: 512 + m_MaxNumObservables: 5 diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity old mode 100755 new mode 100644 index 41d83a6..f858886 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity +++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity @@ -38,7 +38,7 @@ RenderSettings: m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 705507994} - m_IndirectSpecularColor: {r: 0.44657898, g: 0.4964133, b: 0.5748178, a: 1} + m_IndirectSpecularColor: {r: 0.44657815, g: 0.49641192, b: 0.57481617, a: 1} m_UseRadianceAmbientProbe: 0 --- !u!157 &3 LightmapSettings: @@ -259,6 +259,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &140697607 @@ -353,6 +354,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &293522541 @@ -1315,6 +1317,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &1116745545 @@ -1745,7 +1748,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1345085340} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 47.446796, y: -5.782543, z: -25.400002} + m_LocalPosition: {x: 21.51, y: -5.782543, z: -10.46} m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} m_Children: [] m_Father: {fileID: 2060099472} @@ -1763,6 +1766,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &1345085343 @@ -1857,6 +1861,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &1488699524 @@ -2077,6 +2082,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &1663305224 @@ -2251,7 +2257,7 @@ MonoBehaviour: - {fileID: 2004854094} AIPrefab: {fileID: 2988578997639256874, guid: b016874eb34cc084aa4359f0bbec50e1, type: 3} - PlayerPrefab: {fileID: 5245491127989480125, guid: 80f6c1c85e5daed4c96c70205ed5503d, + PlayerPrefab: {fileID: 5245491127989480125, guid: 99bb17a23a489624baeaf337f91a4f84, type: 3} --- !u!1 &1871257865 GameObject: @@ -2506,6 +2512,7 @@ 
MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &2004854095 diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs index d1ec569..33b15d5 100755 --- a/Assets/Scripts/Character/CharacterCondition.cs +++ b/Assets/Scripts/Character/CharacterCondition.cs @@ -9,6 +9,7 @@ public class CharacterCondition public event Action OnChangeAmmunitionEvent; private int health; + public int HealthPoints { get diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs index 20e1026..71997d6 100644 --- a/Assets/Scripts/Character/MovementController.cs +++ b/Assets/Scripts/Character/MovementController.cs @@ -7,13 +7,28 @@ public class MovementController : MonoBehaviour { public NavPoint currentPosition { get; set; } + private Dictionary<int, NavPoint> navPoints = new Dictionary<int, NavPoint>(); + [SerializeField] private NavMeshAgent navMeshAgent; private void Start() { - navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed; + navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed; + foreach (var np in MapManager.navPoints) { + navPoints[np.PointId] = np; + } } + public void MoveToPointById(int id) + { + if (!navPoints.ContainsKey(id)) + { + Debug.LogWarning("MoveToPointById: no NavPoint with id " + id); + return; + } + goToNextNavPoint(navPoints[id]); + } + public void MoveToRandomPoint() { Debug.Log(MapManager.navPoints == null); @@ -23,7 +38,7 @@ public class MovementController : MonoBehaviour public List<NavPoint> getPointsCandidate() { return MapManager.navPoints - .Where(point => (currentPosition.position - point.position).magnitude < SettingsReader.Instance.GetSettings.movementSpeed) + .Where(point => (currentPosition.position - point.position).magnitude < SettingsReader.Instance.GetSettings.movementDistance) .ToList(); } diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index 1069d42..7eb27c2 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -54,24 +54,29 @@ public class NPC : Agent, ICharacter sensor.AddObservation(Condition.HealthPoints); sensor.AddObservation(Condition.ArmourPoints); sensor.AddObservation(Condition.Ammunition); - sensor.AddObservation((int)NPC_State.State); + sensor.AddObservation((int) NPC_State.State); var candidates = moveController.getPointsCandidate(); foreach (var point in candidates) { - bufferSensor.AppendObservation(new float[] { + var parray = new float[] + { //1 position in navpointId - (float)moveController.currentPosition.PointId, + (float) moveController.currentPosition.PointId, //2 distance to flag moveController.currentPosition.FlagDistance, //3 death count in point moveController.currentPosition.DeathAttr, //4 flagEnemyDistance - GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position)==true?1:0, + GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position) == true ? 1 : 0, //5 EnemyVsNavPointDistance - GameManager.IsCloserToEnemyThanToNextNavPoint(point,transform.position, AgentCharacter.Team)==true?1:0 - }); - + GameManager.IsCloserToEnemyThanToNextNavPoint(point, transform.position, AgentCharacter.Team) == true + ?
1 + : 0 }; + var _parray = string.Join(" ", parray); + Debug.Log("OBS: " + _parray); bufferSensor.AppendObservation(parray); } } @@ -90,6 +95,9 @@ public class NPC : Agent, ICharacter { moveController.MoveToRandomPoint(); NPC_State = RunningState; + } else if (actions.DiscreteActions[0] == 2) + { + moveController.MoveToPointById(actions.DiscreteActions[1]); } } diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs index 2b43e84..b36d9e8 100755 --- a/Assets/Scripts/Misc/NavPoint.cs +++ b/Assets/Scripts/Misc/NavPoint.cs @@ -8,13 +8,14 @@ public class NavPoint : MonoBehaviour public float FlagDistance { get; private set; } [HideInInspector] - public int? PointId; + public int PointId; public float DeathAttr = 0; public List EnemiesSeen = new List(); //Here other attributes; private void Start() { + PointId = GetInstanceID(); FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude; } } diff --git a/Assets/Settings/Game Settings.asset b/Assets/Settings/Game Settings.asset old mode 100755 new mode 100644 index f1a62c0..98a5b56 --- a/Assets/Settings/Game Settings.asset +++ b/Assets/Settings/Game Settings.asset @@ -27,9 +27,9 @@ MonoBehaviour: armourPickupAmount: 50 ammunitionPickupAmount: 120 pickupsAmount: 2 - maxHealth: 0 - maxArmour: 0 - maxAmmo: 0 + maxHealth: 100 + maxArmour: 100 + maxAmmo: 31337 GetHitChanceInDirectPoint: 0 GetHitChanceInRunning: 0 GetHitChanceInCover: 0 From aae98595d3fdeea2a7ef8ca7bbff1221302df705 Mon Sep 17 00:00:00 2001 From: Andrey Gumirov Date: Tue, 19 Apr 2022 19:24:53 +0700 Subject: [PATCH 05/27] Small fixes --- Assets/ML-Agents/Timers/Greatest_map_ever_timers.json | 2 +- Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity | 4 ++-- Assets/Scripts/Character/MovementController.cs | 2 +- Assets/Scripts/Character/NPC.cs | 4 ++-- Assets/Scripts/Managers/GameManager.cs | 2 ++ 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json index ad80b5f..6fe3a04 100644 --- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json +++ b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json @@ -1 +1 @@
-{"count":1,"self":16.7881248,"total":28.869280999999997,"children":{"InitializeActuators":{"count":1,"self":0.0062039999999999994,"total":0.0062039999999999994,"children":null},"InitializeSensors":{"count":1,"self":0.009415,"total":0.009415,"children":null},"AgentInfo.ToProto":{"count":1,"self":0.0037809999999999996,"total":0.023178,"children":{"GenerateSensorData":{"count":1,"self":0.019396999999999998,"total":0.019396999999999998,"children":null}}},"AgentSendState":{"count":1094,"self":0.070126,"total":2.03434,"children":{"CollectObservations":{"count":1094,"self":1.694399,"total":1.694399,"children":null},"WriteActionMask":{"count":1094,"self":0.01526,"total":0.01526,"children":null},"RequestDecision":{"count":1094,"self":0.046751999999999995,"total":0.254555,"children":{"AgentInfo.ToProto":{"count":1094,"self":0.027583999999999997,"total":0.207803,"children":{"GenerateSensorData":{"count":1094,"self":0.180219,"total":0.180219,"children":null}}}}}}},"DecideAction":{"count":1094,"self":9.9110736,"total":9.911074,"children":null},"AgentAct":{"count":1094,"self":0.088208,"total":0.090377,"children":{"AgentInfo.ToProto":{"count":10,"self":0.00033,"total":0.002169,"children":{"GenerateSensorData":{"count":10,"self":0.001839,"total":0.001839,"children":null}}}}}},"gauges":{"npc.CumulativeReward":{"count":11,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650283028","unity_version":"2019.4.36f1","command_line_arguments":"\/Applications\/Unity\/Hub\/Editor\/2019.4.36f1\/Unity.app\/Contents\/MacOS\/Unity -projectpath \/Users\/gav\/tmp\/projects\/real-shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-gav -hubSessionId 3b1d3b50-bef1-11ec-a2f1-7fead03718a2 -accessToken r8bZAeodsmy_r-5SjXH5Fv1-VHYleg9acf27P9OS5KU009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650283056"}} \ No newline at end of file 
+{"count":1,"self":253.933696,"total":298.669542,"children":{"InitializeActuators":{"count":1,"self":0.005415,"total":0.005415,"children":null},"InitializeSensors":{"count":1,"self":0.008598,"total":0.008598,"children":null},"AgentInfo.ToProto":{"count":1,"self":0.0036699999999999997,"total":0.021755999999999998,"children":{"GenerateSensorData":{"count":1,"self":0.018085999999999998,"total":0.018085999999999998,"children":null}}},"AgentSendState":{"count":6944,"self":0.233448,"total":1.314088,"children":{"CollectObservations":{"count":6944,"self":0.32020899999999997,"total":0.32020899999999997,"children":null},"WriteActionMask":{"count":6944,"self":0.051706999999999996,"total":0.051706999999999996,"children":null},"RequestDecision":{"count":6944,"self":0.135635,"total":0.708724,"children":{"AgentInfo.ToProto":{"count":6944,"self":0.088769,"total":0.573089,"children":{"GenerateSensorData":{"count":6944,"self":0.48432,"total":0.48432,"children":null}}}}}}},"DecideAction":{"count":6944,"self":43.1909152,"total":43.190914,"children":null},"AgentAct":{"count":6944,"self":0.183273,"total":0.18875699999999998,"children":{"AgentInfo.ToProto":{"count":69,"self":0.000744,"total":0.005484,"children":{"GenerateSensorData":{"count":69,"self":0.0047399999999999994,"total":0.0047399999999999994,"children":null}}}}}},"gauges":{"npc.CumulativeReward":{"count":70,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650370738","unity_version":"2019.4.36f1","command_line_arguments":"\/Applications\/Unity\/Hub\/Editor\/2019.4.36f1\/Unity.app\/Contents\/MacOS\/Unity -projectpath \/Users\/gav\/tmp\/projects\/real-shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-gav -hubSessionId 6cd92410-bfd2-11ec-8ee1-a9a435d1c35b -accessToken 8rpFnsCAS7iPqYsIEudq2a-EfGjvYCbkRj1LbKfo7EM009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650371036"}} \ No newline at end of file diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity index f858886..e0274e5 100644 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity +++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity @@ -1122,7 +1122,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 705507993} m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261} - m_LocalPosition: {x: 77.5, y: 55.2, z: -5.9} + m_LocalPosition: {x: 81.2, y: 64.1, z: -12.4} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} @@ -2600,7 +2600,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2060099471} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 19.553204, y: 7.782543, z: -15} + m_LocalPosition: {x: 27.79, y: 7.782543, z: -15} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - {fileID: 1345085341} diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs index 71997d6..e848ddb 100644 --- a/Assets/Scripts/Character/MovementController.cs +++ b/Assets/Scripts/Character/MovementController.cs @@ -31,7 +31,7 @@ public class MovementController : MonoBehaviour public void MoveToRandomPoint() { - Debug.Log(MapManager.navPoints == null); + // Debug.Log(MapManager.navPoints == null); goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]); } diff --git 
a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index 7eb27c2..cf8aa9b 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -74,8 +74,8 @@ public class NPC : Agent, ICharacter ? 1 : 0 }; - var _parray = string.Join(" ", parray); - Debug.Log("OBS: " + _parray); + // var _parray = string.Join(" ", parray); + // Debug.Log("OBS: " + _parray); bufferSensor.AppendObservation(parray); } } diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs index 54d1e1d..1dab597 100755 --- a/Assets/Scripts/Managers/GameManager.cs +++ b/Assets/Scripts/Managers/GameManager.cs @@ -60,9 +60,11 @@ public class GameManager : MonoBehaviour { case Team.Attackers: Debug.Log("Attackers Win"); + ResetScene(); break; case Team.Defenders: Debug.Log("Defenders Win"); + ResetScene(); break; default: Debug.LogError("Unexpected Team"); From dfdd018a5f6a4ccd6288ea86d0f49aedc6817d21 Mon Sep 17 00:00:00 2001 From: Andrey Gumirov Date: Tue, 19 Apr 2022 19:25:45 +0700 Subject: [PATCH 06/27] added mlagent jsons to gitignore --- .gitignore | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.gitignore b/.gitignore index c2e55b3..b0e4fab 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,11 @@ # # Get latest from https://github.com/github/gitignore/blob/main/Unity.gitignore # + +# MLagents +Assets/ML-Agents/Timers/* +# + /[Ll]ibrary/ /[Tt]emp/ /[Oo]bj/ From 1e4f90c0edb8e6bdc60ea731c4723501455ceddc Mon Sep 17 00:00:00 2001 From: Andrey Gumirov Date: Tue, 19 Apr 2022 19:31:15 +0700 Subject: [PATCH 07/27] Removed useless temporary files --- Assets/ML-Agents/Timers/DemoScene01_timers.json | 1 - Assets/ML-Agents/Timers/DemoScene01_timers.json.meta | 7 ------- Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta | 7 ------- Assets/ML-Agents/Timers/dont touch me plz_timers.json | 1 - Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta | 7 ------- 5 files changed, 23 deletions(-) delete mode 100755 Assets/ML-Agents/Timers/DemoScene01_timers.json delete mode 100755 Assets/ML-Agents/Timers/DemoScene01_timers.json.meta delete mode 100755 Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta delete mode 100755 Assets/ML-Agents/Timers/dont touch me plz_timers.json delete mode 100755 Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta diff --git a/Assets/ML-Agents/Timers/DemoScene01_timers.json b/Assets/ML-Agents/Timers/DemoScene01_timers.json deleted file mode 100755 index 3914503..0000000 --- a/Assets/ML-Agents/Timers/DemoScene01_timers.json +++ /dev/null @@ -1 +0,0 @@ -{"count":1,"self":249.99626239999998,"total":250.70272609999998,"children":{"InitializeActuators":{"count":1,"self":0.0005131,"total":0.0005131,"children":null},"AgentSendState":{"count":8389,"self":0.1187579,"total":0.1187579,"children":null},"DecideAction":{"count":8389,"self":0.5871921,"total":0.5871921,"children":null}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649259643","unity_version":"2019.4.36f1","command_line_arguments":"C:\\Program Files\\Unity\\Hub\\Editor\\2019.4.36f1\\Editor\\Unity.exe -projectpath D:\\real_shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-user -hubSessionId 91406950-b5ad-11ec-a63c-e7b76cbae13d -accessToken EBt97pMhHqClFDnjD_uh-3vplxP-uI2yS0WK-hSxfuM012f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"DemoScene01","end_time_seconds":"1649259893"}} \ No newline at end of file diff --git 
a/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta b/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta deleted file mode 100755 index fe18e4a..0000000 --- a/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 1b328c4e26e7a994e8a42f269ca7419b -TextScriptImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta deleted file mode 100755 index 1400775..0000000 --- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 80ef0d75029e25243857877facd14d75 -TextScriptImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/Assets/ML-Agents/Timers/dont touch me plz_timers.json b/Assets/ML-Agents/Timers/dont touch me plz_timers.json deleted file mode 100755 index 699e26c..0000000 --- a/Assets/ML-Agents/Timers/dont touch me plz_timers.json +++ /dev/null @@ -1 +0,0 @@ -{"count":1,"self":14.5494256,"total":14.5766034,"children":{"InitializeActuators":{"count":1,"self":0,"total":0,"children":null},"AgentSendState":{"count":497,"self":0.0020012999999999997,"total":0.0020012999999999997,"children":null},"DecideAction":{"count":497,"self":0.0241768,"total":0.0241768,"children":null}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649741696","unity_version":"2019.4.35f1","command_line_arguments":"D:\\Unity\\2019.4.35f1\\Editor\\Unity.exe -projectpath C:\\Users\\kiril\\real-shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-kiril -hubSessionId 21e9de90-ba14-11ec-9398-079edccf5e34 -accessToken oIpf_7gKWQQRilVTeJehUsFhrxasdzsG_K3j5Swtgx0009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"dont touch me plz","end_time_seconds":"1649741711"}} \ No newline at end of file diff --git a/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta b/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta deleted file mode 100755 index db7171f..0000000 --- a/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 249f8a9af2b0bc041a08a0009a6fdf44 -TextScriptImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: From 290f5515b71edb1a7f342c7a298a8b0c94de34b9 Mon Sep 17 00:00:00 2001 From: Krazerleo Date: Mon, 25 Apr 2022 16:23:25 +0700 Subject: [PATCH 08/27] update scripts --- Assets/Scripts/Bots/CharacterFactory.cs | 18 +++++---- .../Scripts/Character/CharacterCondition.cs | 17 +++++++-- .../Scripts/Character/MovementController.cs | 21 ++++++++-- Assets/Scripts/Character/NPC.cs | 27 ++++++++----- Assets/Scripts/Managers/GameManager.cs | 14 +++++++ Assets/Scripts/Misc/FlagZone.cs | 6 +-- Assets/Scripts/Misc/Settings.cs | 38 ++++++++++--------- Assets/Scripts/Pickups/AmmoPickUp.cs | 2 +- Assets/Scripts/Pickups/ArmourPickUp.cs | 2 +- Assets/Scripts/Pickups/HealthPickUp.cs | 2 +- Assets/Scripts/Pickups/PickUpSpawner.cs | 2 +- Assets/Scripts/Utils/BoolToInteger.cs | 7 ++++ Assets/Scripts/Utils/BoolToInteger.cs.meta | 11 ++++++ 13 files changed, 120 insertions(+), 47 deletions(-) create mode 100644 Assets/Scripts/Utils/BoolToInteger.cs create mode 100644 Assets/Scripts/Utils/BoolToInteger.cs.meta diff --git a/Assets/Scripts/Bots/CharacterFactory.cs b/Assets/Scripts/Bots/CharacterFactory.cs index 
e2c8be3..ab18289 100644 --- a/Assets/Scripts/Bots/CharacterFactory.cs +++ b/Assets/Scripts/Bots/CharacterFactory.cs @@ -25,10 +25,10 @@ public class CharacterFactory : MonoBehaviour private void Start() { - var attcNum = SettingsReader.Instance.GetSettings.numOfAttackers; - var defNum = SettingsReader.Instance.GetSettings.numOfDefenders; - var humanDef = SettingsReader.Instance.GetSettings.hasHumanDefender == true ? 1 : 0; - var humanAtc = SettingsReader.Instance.GetSettings.hasHumanAttacker == true ? 1 : 0; + var attcNum = SettingsReader.Instance.GetSettings.NumOfAttackers; + var defNum = SettingsReader.Instance.GetSettings.NumOfDefenders; + var humanDef = SettingsReader.Instance.GetSettings.HasHumanDefender == true ? 1 : 0; + var humanAtc = SettingsReader.Instance.GetSettings.HasHumanAttacker == true ? 1 : 0; if (humanAtc == 1 && humanDef == 1) throw new System.ArgumentException("Can be only one human player"); @@ -56,6 +56,10 @@ public class CharacterFactory : MonoBehaviour spawnPoint.position, Quaternion.identity); gameobject.SetActive(true); + if (team == Team.Attackers) + gameObject.tag = "Attacker"; + else + gameObject.tag = "Defender"; if (typeAi == TypeAI.HumanAI) { @@ -65,7 +69,7 @@ public class CharacterFactory : MonoBehaviour else { gameobject.GetComponent().GetCharacter.Team = team; - gameobject.GetComponent().currentPosition = spawnPoint; + gameobject.GetComponent().CurrentNavPoint = spawnPoint; Bots.Add(gameobject); } } @@ -81,8 +85,8 @@ public class CharacterFactory : MonoBehaviour else bot.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].position; } - var player = Player.GetComponent(); - if (player != null) + Player player; + if (TryGetComponent(out player)) { player.ResetCharacter(); if (player.GetCharacter.Team == Team.Attackers) diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs index d1ec569..b674222 100755 --- a/Assets/Scripts/Character/CharacterCondition.cs +++ b/Assets/Scripts/Character/CharacterCondition.cs @@ -21,6 +21,17 @@ public class CharacterCondition OnChangeHealthEvent?.Invoke(value); } } + + public int GetHealthPointsInQuantile() + { + if (health < 25) + return 0; + else if (health < 50) + return 1; + else if (health < 75) + return 2; + else return 3; + } private int armour; public int ArmourPoints { @@ -51,9 +62,9 @@ public class CharacterCondition public CharacterCondition() { var settings = SettingsReader.Instance.GetSettings; - ammo = settings.maxAmmo; - health = settings.maxHealth; - armour = settings.maxArmour; + ammo = settings.MaxAmmo; + health = settings.MaxHealth; + armour = settings.MaxArmour; } public void GiveHealth(int health) => HealthPoints = Mathf.Clamp(health + HealthPoints, 0, 100); diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs index 20e1026..334b7d2 100644 --- a/Assets/Scripts/Character/MovementController.cs +++ b/Assets/Scripts/Character/MovementController.cs @@ -2,16 +2,31 @@ using System.Collections.Generic; using UnityEngine; using UnityEngine.AI; +using System.Threading.Tasks; [RequireComponent(typeof(NavMeshAgent))] public class MovementController : MonoBehaviour { - public NavPoint currentPosition { get; set; } + public NavPoint CurrentNavPoint { get; set; } + public float FlagDistance { get; private set; } + private GameObject flag; + private const float updateFlagPositionDelay = 5; [SerializeField] private NavMeshAgent navMeshAgent; private void Start() { - 
navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed; + navMeshAgent.speed = SettingsReader.Instance.GetSettings.MovementSpeed; + InvokeRepeating(nameof(UpdateFlagPosition), 0, updateFlagPositionDelay); + } + + private void OnDestroy() + { + CancelInvoke(nameof(UpdateFlagPosition)); + } + + private void UpdateFlagPosition() + { + FlagDistance = (flag.transform.position - gameObject.transform.position).magnitude; } public void MoveToRandomPoint() @@ -23,7 +38,7 @@ public class MovementController : MonoBehaviour public List getPointsCandidate() { return MapManager.navPoints - .Where(point => (currentPosition.position - point.position).magnitude < SettingsReader.Instance.GetSettings.movementSpeed) + .Where(point => (CurrentNavPoint.position - point.position).magnitude < SettingsReader.Instance.GetSettings.MovementSpeed) .ToList(); } diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index 1069d42..c738d0b 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -3,7 +3,6 @@ using UnityEngine; using Unity.MLAgents; using Unity.MLAgents.Sensors; using Unity.MLAgents.Actuators; -using System.Collections.Generic; [RequireComponent(typeof(MovementController))] public class NPC : Agent, ICharacter @@ -11,6 +10,7 @@ public class NPC : Agent, ICharacter [HideInInspector] public Character AgentCharacter; public CharacterCondition Condition; + private FlagZone flagZone; public NPC_BaseState NPC_State { get; private set; } @@ -47,29 +47,38 @@ public class NPC : Agent, ICharacter public override void OnEpisodeBegin() { NPC_State = DirectState; + flagZone = GameObject.FindObjectOfType(); } public override void CollectObservations(VectorSensor sensor) { + var candidates = moveController.getPointsCandidate(); + sensor.AddObservation(Condition.HealthPoints); sensor.AddObservation(Condition.ArmourPoints); sensor.AddObservation(Condition.Ammunition); sensor.AddObservation((int)NPC_State.State); - - var candidates = moveController.getPointsCandidate(); + sensor.AddObservation((!flagZone.isNotOccup).ToInt()); + sensor.AddObservation(AgentCharacter.LastTimeHit); + sensor.AddObservation(Condition.GetHealthPointsInQuantile()); + sensor.AddObservation(candidates.Count); + sensor.AddObservation(GameManager.IsEnemyNearby(gameObject.transform.position, AgentCharacter.Team)); + foreach (var point in candidates) { + Debug.Log((float)moveController.CurrentNavPoint.PointId); + bufferSensor.AppendObservation(new float[] { //1 position in navpointId - (float)moveController.currentPosition.PointId, + (float)moveController.CurrentNavPoint.PointId, //2 distance to flag - moveController.currentPosition.FlagDistance, + moveController.FlagDistance, //3 death count in point - moveController.currentPosition.DeathAttr, + moveController.CurrentNavPoint.DeathAttr, //4 flagEnemyDistance - GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position)==true?1:0, + GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position).ToInt(), //5 EnemyVsNavPointDistance - GameManager.IsCloserToEnemyThanToNextNavPoint(point,transform.position, AgentCharacter.Team)==true?1:0 + GameManager.IsCloserToEnemyThanToNextNavPoint(point,transform.position, AgentCharacter.Team).ToInt() }); } @@ -103,7 +112,7 @@ public class NPC : Agent, ICharacter if (Condition.HealthPoints < 0) { OnKilledEvent?.Invoke(this); - moveController.currentPosition.DeathAttr += 1; + moveController.CurrentNavPoint.DeathAttr += 1; } } diff --git a/Assets/Scripts/Managers/GameManager.cs 
b/Assets/Scripts/Managers/GameManager.cs index 54d1e1d..b91908d 100755 --- a/Assets/Scripts/Managers/GameManager.cs +++ b/Assets/Scripts/Managers/GameManager.cs @@ -51,6 +51,20 @@ public class GameManager : MonoBehaviour return false; } + public static bool IsEnemyNearby(Vector3 currentTransform, Team team) + { + SimpleMultiAgentGroup agentGroup; + if (team == Team.Attackers) + agentGroup = AttackersTeam; + else + agentGroup = DefendersTeam; + + foreach (var agent in agentGroup.GetRegisteredAgents()) + if ((currentTransform - agent.transform.position).magnitude < SettingsReader.Instance.GetSettings.ViewDistance) + return true; + return false; + } + public static bool IsCloserToFlagFromNextNavPoint(NavPoint navPoint, Vector3 currentTransform) => navPoint.FlagDistance < (currentTransform - GameObject.FindGameObjectWithTag("Flag").transform.position).magnitude; diff --git a/Assets/Scripts/Misc/FlagZone.cs b/Assets/Scripts/Misc/FlagZone.cs index 8cd4ab4..c6328dd 100755 --- a/Assets/Scripts/Misc/FlagZone.cs +++ b/Assets/Scripts/Misc/FlagZone.cs @@ -9,14 +9,14 @@ public class FlagZone : MonoBehaviour public float TimeStayDefenders { get; private set; } private int occupDefenders; private int occupAttackers; - private bool isOccupBoth => (occupDefenders>0) && (occupAttackers>0); - private bool isNotOccup => (occupDefenders == 0) && (occupAttackers == 0); + public bool isOccupBoth => (occupDefenders>0) && (occupAttackers>0); + public bool isNotOccup => (occupDefenders == 0) && (occupAttackers == 0); private float timeForWin; private void Start() { - timeForWin = SettingsReader.Instance.GetSettings.timeToWin; + timeForWin = SettingsReader.Instance.GetSettings.TimeToWin; TimeStayAttackers = 0; TimeStayDefenders = 0; occupAttackers = 0; diff --git a/Assets/Scripts/Misc/Settings.cs b/Assets/Scripts/Misc/Settings.cs index e01abc1..b1b7ea8 100755 --- a/Assets/Scripts/Misc/Settings.cs +++ b/Assets/Scripts/Misc/Settings.cs @@ -3,30 +3,32 @@ [CreateAssetMenu(fileName ="Game Settings", menuName = "Game/Settings", order = 51)] public class Settings : ScriptableObject { - public bool isTesting; + public bool IsTesting; - public float timeToWin; - public float timeOut; + public float TimeToWin; + public float TimeOut; [Header("movement")] - public float movementDistance; - public float movementSpeed; + public float MovementDistance; + public float MovementSpeed; - public TypeAI defTeamAI; - public TypeAI atcTeamAI; - public int numOfDefenders; - public int numOfAttackers; - public bool hasHumanDefender; - public bool hasHumanAttacker; + public TypeAI DefTeamAI; + public TypeAI AtcTeamAI; + public int NumOfDefenders; + public int NumOfAttackers; + public bool HasHumanDefender; + public bool HasHumanAttacker; - public int healthPickupAmount; - public int armourPickupAmount; - public int ammunitionPickupAmount; - public int pickupsAmount; + public int HealthPickupAmount; + public int ArmourPickupAmount; + public int AmmunitionPickupAmount; + public int PickupsAmount; - public int maxHealth; - public int maxArmour; - public int maxAmmo; + public int MaxHealth; + public int MaxArmour; + public int MaxAmmo; + + public float ViewDistance; public float GetHitChanceInDirectPoint; public float GetHitChanceInRunning; diff --git a/Assets/Scripts/Pickups/AmmoPickUp.cs b/Assets/Scripts/Pickups/AmmoPickUp.cs index 506598d..8439c21 100755 --- a/Assets/Scripts/Pickups/AmmoPickUp.cs +++ b/Assets/Scripts/Pickups/AmmoPickUp.cs @@ -13,7 +13,7 @@ public class AmmoPickUp : MonoBehaviour, IPickable public void PickObject(GameObject 
obj) { - obj.GetComponent()?.GetCharacter.Condition.TakeAmmo(SettingsReader.Instance.GetSettings.ammunitionPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.TakeAmmo(SettingsReader.Instance.GetSettings.AmmunitionPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/ArmourPickUp.cs b/Assets/Scripts/Pickups/ArmourPickUp.cs index 86be289..c68e491 100755 --- a/Assets/Scripts/Pickups/ArmourPickUp.cs +++ b/Assets/Scripts/Pickups/ArmourPickUp.cs @@ -13,7 +13,7 @@ public class ArmourPickUp : MonoBehaviour, IPickable public void PickObject(GameObject obj) { - obj.GetComponent()?.GetCharacter.Condition.GiveArmour(SettingsReader.Instance.GetSettings.armourPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.GiveArmour(SettingsReader.Instance.GetSettings.ArmourPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/HealthPickUp.cs b/Assets/Scripts/Pickups/HealthPickUp.cs index f92345a..36f81ba 100755 --- a/Assets/Scripts/Pickups/HealthPickUp.cs +++ b/Assets/Scripts/Pickups/HealthPickUp.cs @@ -13,7 +13,7 @@ public class HealthPickUp : MonoBehaviour, IPickable public void PickObject(GameObject obj) { - obj.GetComponent()?.GetCharacter.Condition.GiveHealth(SettingsReader.Instance.GetSettings.healthPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.GiveHealth(SettingsReader.Instance.GetSettings.HealthPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/PickUpSpawner.cs b/Assets/Scripts/Pickups/PickUpSpawner.cs index 945c1d3..7a29705 100755 --- a/Assets/Scripts/Pickups/PickUpSpawner.cs +++ b/Assets/Scripts/Pickups/PickUpSpawner.cs @@ -27,7 +27,7 @@ public class PickUpSpawner : MonoBehaviour private void Start() { pickups = new List(); - var amount = SettingsReader.Instance.GetSettings.pickupsAmount; + var amount = SettingsReader.Instance.GetSettings.PickupsAmount; for (int i = 0; i < amount; i++) pickups.Add(GameObject.Instantiate(healthPrefab, spawnPoints[Random.Range(0, spawnPoints.Count)].transform.position, Quaternion.identity)); for (int i = 0; i < amount; i++) diff --git a/Assets/Scripts/Utils/BoolToInteger.cs b/Assets/Scripts/Utils/BoolToInteger.cs new file mode 100644 index 0000000..0d8090a --- /dev/null +++ b/Assets/Scripts/Utils/BoolToInteger.cs @@ -0,0 +1,7 @@ +public static class BoolExtension +{ + public static int ToInt(this bool _bool) + { + return _bool == true ? 
1 : 0; + } +} \ No newline at end of file diff --git a/Assets/Scripts/Utils/BoolToInteger.cs.meta b/Assets/Scripts/Utils/BoolToInteger.cs.meta new file mode 100644 index 0000000..3688775 --- /dev/null +++ b/Assets/Scripts/Utils/BoolToInteger.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f48fff3c2eda14d4fba923fe8875f651 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: From c8af0e528415cb8b3b198edc7eb513fdf9133710 Mon Sep 17 00:00:00 2001 From: Krazerleo Date: Wed, 4 May 2022 23:50:07 +0700 Subject: [PATCH 09/27] to new git --- Assets/Scripts/Bots/CharacterFactory.cs | 44 +++-- Assets/Scripts/Bots/TeamEnum.cs | 11 ++ Assets/Scripts/Character/Character.cs | 6 - .../Scripts/Character/CharacterCondition.cs | 32 +++- .../Interfaces.meta} | 2 +- .../Character/Interfaces/ICharacter.cs | 5 + .../Interfaces/ICharacter.cs.meta} | 2 +- .../Character/Interfaces/INpcBaseState.cs | 17 ++ .../Interfaces/INpcBaseState.cs.meta} | 2 +- .../Scripts/Character/MovementController.cs | 70 +++++-- Assets/Scripts/Character/NPC.cs | 178 ++++++++++++------ Assets/Scripts/Character/NPC_State.cs | 46 ----- Assets/Scripts/Character/NpcState.cs | 68 +++++++ .../{NPC_State.cs.meta => NpcState.cs.meta} | 0 Assets/Scripts/Character/Player.cs | 12 +- .../Character/scr_CharacterController.cs | 64 +++---- Assets/Scripts/Character/scr_Models.cs | 120 ++++++------ Assets/Scripts/Managers/GameManager.cs | 107 ++++++++--- Assets/Scripts/Managers/MapManager.cs | 63 ++++++- Assets/Scripts/Managers/TimeManager.cs | 8 +- Assets/Scripts/Misc/FlagZone.cs | 14 +- Assets/Scripts/Misc/NavPoint.cs | 20 +- Assets/Scripts/Misc/Settings.cs | 4 +- Assets/Scripts/Misc/SettingsReader.cs | 16 +- Assets/Scripts/Misc/Statistics.cs | 9 - Assets/Scripts/Pickups/AmmoPickUp.cs | 8 +- Assets/Scripts/Pickups/ArmourPickUp.cs | 8 +- Assets/Scripts/Pickups/HealthPickUp.cs | 8 +- Assets/Scripts/Pickups/IPickable.cs | 5 +- Assets/Scripts/Pickups/PickUpSpawner.cs | 10 +- Assets/Scripts/Sensors/SensorType.cs | 6 - Assets/Scripts/Sensors/Sensors.cs | 4 - Assets/Scripts/Statistics.meta | 8 + Assets/Scripts/Statistics/Logger.cs | 19 ++ Assets/Scripts/Statistics/Logger.cs.meta | 11 ++ Assets/Scripts/Statistics/StatisticManager.cs | 51 +++++ .../StatisticManager.cs.meta} | 2 +- .../Scripts/Utils/SerializableDictionary.cs | 4 +- .../Scripts/Weapons/scr_WeaponController.cs | 15 +- 39 files changed, 720 insertions(+), 359 deletions(-) rename Assets/Scripts/{Sensors.meta => Character/Interfaces.meta} (77%) mode change 100755 => 100644 create mode 100644 Assets/Scripts/Character/Interfaces/ICharacter.cs rename Assets/Scripts/{Sensors/SensorType.cs.meta => Character/Interfaces/ICharacter.cs.meta} (83%) mode change 100755 => 100644 create mode 100644 Assets/Scripts/Character/Interfaces/INpcBaseState.cs rename Assets/Scripts/{Sensors/Sensors.cs.meta => Character/Interfaces/INpcBaseState.cs.meta} (83%) delete mode 100644 Assets/Scripts/Character/NPC_State.cs create mode 100644 Assets/Scripts/Character/NpcState.cs rename Assets/Scripts/Character/{NPC_State.cs.meta => NpcState.cs.meta} (100%) delete mode 100755 Assets/Scripts/Misc/Statistics.cs delete mode 100755 Assets/Scripts/Sensors/SensorType.cs delete mode 100644 Assets/Scripts/Sensors/Sensors.cs create mode 100644 Assets/Scripts/Statistics.meta create mode 100644 Assets/Scripts/Statistics/Logger.cs create mode 100644 Assets/Scripts/Statistics/Logger.cs.meta create mode 100644 
Assets/Scripts/Statistics/StatisticManager.cs rename Assets/Scripts/{Misc/Statistics.cs.meta => Statistics/StatisticManager.cs.meta} (91%) mode change 100755 => 100644 diff --git a/Assets/Scripts/Bots/CharacterFactory.cs b/Assets/Scripts/Bots/CharacterFactory.cs index ab18289..0ca7c1d 100644 --- a/Assets/Scripts/Bots/CharacterFactory.cs +++ b/Assets/Scripts/Bots/CharacterFactory.cs @@ -1,26 +1,28 @@ using System.Collections.Generic; using UnityEngine; -using Unity; public class CharacterFactory : MonoBehaviour { - private CharacterFactory instance; - public CharacterFactory Instance { get { return instance; } } + private static CharacterFactory instance; + public static CharacterFactory Instance => instance; [SerializeField] private List spawnPointsForDefendersTeam; [SerializeField] private List spawnPointsForAttackersTeam; [SerializeField] private GameObject AIPrefab; [SerializeField] private GameObject PlayerPrefab; - private List Bots = new List(); - private GameObject Player; + private List bots = new List(); + public GameObject player { get; private set; } private void Awake() { if (instance == null) instance = this; else + { Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } } private void Start() @@ -53,7 +55,7 @@ public class CharacterFactory : MonoBehaviour { var gameobject = GameObject.Instantiate( typeAi == TypeAI.HumanAI ? PlayerPrefab : AIPrefab, - spawnPoint.position, + spawnPoint.Position, Quaternion.identity); gameobject.SetActive(true); if (team == Team.Attackers) @@ -64,35 +66,49 @@ public class CharacterFactory : MonoBehaviour if (typeAi == TypeAI.HumanAI) { gameobject.GetComponent().GetCharacter.Team = team; - Player = gameobject; + player = gameobject; } else { gameobject.GetComponent().GetCharacter.Team = team; - gameobject.GetComponent().CurrentNavPoint = spawnPoint; - Bots.Add(gameobject); + gameobject.GetComponent().PointStartID = spawnPoint.PointId; + bots.Add(gameobject); } } + public void ReSpawn(ICharacter character, ref Vector3 pos, ref int startPointId) + { + character.ResetCharacter(); + var team = character.GetCharacter.Team; + NavPoint navPoint; + if (team == Team.Attackers) + navPoint = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)]; + else + navPoint = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]; + + pos = navPoint.Position; + startPointId = navPoint.PointId; + } + private void ResetCharacters() { - foreach (var bot in Bots) + foreach (var bot in bots) { var npc = bot.GetComponent(); npc.ResetCharacter(); if (npc.GetCharacter.Team == Team.Attackers) - bot.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].position; + bot.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].Position; else - bot.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].position; + bot.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].Position; } Player player; if (TryGetComponent(out player)) { player.ResetCharacter(); if (player.GetCharacter.Team == Team.Attackers) - Player.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].position; + this.player.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].Position; else - Player.transform.position = spawnPointsForDefendersTeam[Random.Range(0, 
diff --git a/Assets/Scripts/Bots/TeamEnum.cs b/Assets/Scripts/Bots/TeamEnum.cs index 68279f5..a2c8a95 100755 --- a/Assets/Scripts/Bots/TeamEnum.cs +++ b/Assets/Scripts/Bots/TeamEnum.cs @@ -2,4 +2,15 @@ { Defenders, Attackers, +} + +public static class TeamExtension +{ + public static Team GetOppositeTeam(this Team team) + { + if (team == Team.Attackers) + return Team.Defenders; + else + return Team.Attackers; + } } \ No newline at end of file
diff --git a/Assets/Scripts/Character/Character.cs b/Assets/Scripts/Character/Character.cs index 50e7313..3713998 100644 --- a/Assets/Scripts/Character/Character.cs +++ b/Assets/Scripts/Character/Character.cs @@ -7,12 +7,6 @@ public class Character public Character() { - Debug.Log("init"); Condition = new CharacterCondition(); } -} - -public interface ICharacter -{ - Character GetCharacter { get; } } \ No newline at end of file
diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs index b674222..de60740 100755 --- a/Assets/Scripts/Character/CharacterCondition.cs +++ b/Assets/Scripts/Character/CharacterCondition.cs @@ -9,17 +9,17 @@ public event Action<int> OnChangeAmmunitionEvent; private int health; - public int HealthPoints - { - get - { - return health; - } + public int HealthPoints + { + get + { + return health; + } private set { health = value; OnChangeHealthEvent?.Invoke(value); - } + } } public int GetHealthPointsInQuantile() @@ -30,7 +30,7 @@ return 1; else if (health < 75) return 2; - else return 3; + else return 3; } private int armour; public int ArmourPoints @@ -45,6 +45,17 @@ OnChangeArmourEvent?.Invoke(value); } } + public int GetArmourPointsInQuantile() + { + if (armour < 25) + return 0; + else if (armour < 50) + return 1; + else if (armour < 75) + return 2; + else return 3; + } + private int ammo; public int Ammunition @@ -60,6 +71,11 @@ } public CharacterCondition() + { + this.Reset(); + } + + public void Reset() { var settings = SettingsReader.Instance.GetSettings; ammo = settings.MaxAmmo;
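GetHealthPointsInQuantile and the new GetArmourPointsInQuantile duplicate the same 25/50/75 threshold chain. A shared helper both could delegate to might be worth considering; a sketch (ConditionMath and ToQuartile are hypothetical names, not part of this patch):

    public static class ConditionMath
    {
        // Buckets a 0-100 stat into quartiles 0..3, mirroring the if-chains above.
        public static int ToQuartile(int points)
        {
            if (points < 25) return 0;
            if (points < 50) return 1;
            if (points < 75) return 2;
            return 3;
        }
    }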
diff --git a/Assets/Scripts/Sensors.meta b/Assets/Scripts/Character/Interfaces.meta old mode 100755 new mode 100644 similarity index 77% rename from Assets/Scripts/Sensors.meta rename to Assets/Scripts/Character/Interfaces.meta index a808c2b..cb4a3aa --- a/Assets/Scripts/Sensors.meta +++ b/Assets/Scripts/Character/Interfaces.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 5e73ba257bc6b684c86edf9ecfd475ef +guid: f23b6db3be1e4cd469fd18dfe3e39764 folderAsset: yes DefaultImporter: externalObjects: {}
diff --git a/Assets/Scripts/Character/Interfaces/ICharacter.cs b/Assets/Scripts/Character/Interfaces/ICharacter.cs new file mode 100644 index 0000000..aef14a7 --- /dev/null +++ b/Assets/Scripts/Character/Interfaces/ICharacter.cs @@ -0,0 +1,5 @@ +public interface ICharacter +{ + Character GetCharacter { get; } + void ResetCharacter(); +} \ No newline at end of file
diff --git a/Assets/Scripts/Sensors/SensorType.cs.meta b/Assets/Scripts/Character/Interfaces/ICharacter.cs.meta old mode 100755 new mode 100644 similarity index 83% rename from Assets/Scripts/Sensors/SensorType.cs.meta rename to Assets/Scripts/Character/Interfaces/ICharacter.cs.meta index 26f4b58..b53c34e --- a/Assets/Scripts/Sensors/SensorType.cs.meta +++ b/Assets/Scripts/Character/Interfaces/ICharacter.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 8f76201fe6436164789d10350a0fd6e2 +guid: b6dfb78244ae35c4db1326d5f5b73375 MonoImporter: externalObjects: {} serializedVersion: 2
diff --git a/Assets/Scripts/Character/Interfaces/INpcBaseState.cs b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs new file mode 100644 index 0000000..bf17ad2 --- /dev/null +++ b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs @@ -0,0 +1,17 @@ +using UnityEngine; + +public interface INpcBaseState +{ + NpcEnumState State { get; } + bool InCover { get; } + bool IsRunning { get; } + bool InDirectPoint { get; } + float HitChance { get; } + float DoDamageChance { get; } +} + +public interface INpcBaseBodyState +{ + NpcBodyState State { get; } + Vector3 GetPointToHit(GameObject go); +} \ No newline at end of file
diff --git a/Assets/Scripts/Sensors/Sensors.cs.meta b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs.meta similarity index 83% rename from Assets/Scripts/Sensors/Sensors.cs.meta rename to Assets/Scripts/Character/Interfaces/INpcBaseState.cs.meta index 1109bef..f0585e4 100644 --- a/Assets/Scripts/Sensors/Sensors.cs.meta +++ b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 4599c57bc5b1c3945847dead0f9f0ba4 +guid: 58b7e1962495ada4c8e6ee6219c99a20 MonoImporter: externalObjects: {} serializedVersion: 2
diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs index 334b7d2..2319c08 100644 --- a/Assets/Scripts/Character/MovementController.cs +++ b/Assets/Scripts/Character/MovementController.cs @@ -1,24 +1,32 @@ -using System.Linq; -using System.Collections.Generic; +using System.Collections.Generic; +using System.Linq; using UnityEngine; using UnityEngine.AI; -using System.Threading.Tasks; [RequireComponent(typeof(NavMeshAgent))] public class MovementController : MonoBehaviour { - public NavPoint CurrentNavPoint { get; set; } + public int PointStartID { get; set; } + public int PointEndID { get; private set; } public float FlagDistance { get; private set; } - private GameObject flag; - private const float updateFlagPositionDelay = 5; - [SerializeField] private NavMeshAgent navMeshAgent; + private const float updateFlagPositionDelay = 5; + private const float updateReachedDestinationDelay = 5; - private void Start() + [SerializeField] private NavMeshAgent navMeshAgent; + [SerializeField] private GameObject flag; + public float DistanceToGo { get; private set; } + public float RemainingDistance => navMeshAgent.remainingDistance; + private Dictionary<int, NavPoint> idNavPointDict; + + + private void Awake() { navMeshAgent.speed = SettingsReader.Instance.GetSettings.MovementSpeed; + idNavPointDict = MapManager.IDToNavPoint; InvokeRepeating(nameof(UpdateFlagPosition), 0, updateFlagPositionDelay); + InvokeRepeating(nameof(ReachedDestination), 0, updateReachedDestinationDelay); } - + private void OnDestroy() { CancelInvoke(nameof(UpdateFlagPosition)); @@ -30,18 +38,46 @@ public class MovementController : MonoBehaviour } public void MoveToRandomPoint() - { - Debug.Log(MapManager.navPoints == null); - goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]); + { + Debug.Log(MapManager.NavPoints == null); + GoToNextNavPoint(MapManager.NavPoints[Random.Range(0, MapManager.NavPoints.Count)]); } - public List<NavPoint> getPointsCandidate() + public List<NavPoint> GetPointsCandidate() { - return MapManager.navPoints - .Where(point => (CurrentNavPoint.position - point.position).magnitude < SettingsReader.Instance.GetSettings.MovementSpeed) + return MapManager.NavPoints + .Where(point => + (idNavPointDict[PointStartID].Position - point.Position).magnitude < SettingsReader.Instance.GetSettings.MovementDistance) .ToList(); } - public void goToNextNavPoint(NavPoint destination) => - navMeshAgent.SetDestination(destination.position); + public void GoToNextNavPoint(NavPoint destination) + { + if (navMeshAgent.isStopped == true) navMeshAgent.isStopped = false; + PointStartID = PointEndID; + PointEndID = destination.PointId; + navMeshAgent.SetDestination(destination.Position); + DistanceToGo = navMeshAgent.remainingDistance; + } + + public void ReturnToStartPoint() + { + if (navMeshAgent.isStopped == true) navMeshAgent.isStopped = false; + navMeshAgent.SetDestination(idNavPointDict[PointStartID].Position); + PointEndID = PointStartID; + PointStartID = -1; + } + + public void StopOnPath() + { + navMeshAgent.isStopped = true; + PointStartID = -1; + PointEndID = -1; + } + + public void ReachedDestination() + { + if ((navMeshAgent.isStopped == false) && (navMeshAgent.velocity.magnitude < 0.1)) + PointStartID = PointEndID; + } }
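GetPointsCandidate filters every nav point by straight-line distance against MovementDistance. If this runs each decision step, comparing squared distances is the usual Unity micro-optimization, since it skips a square root per point. A sketch using the same names the patch introduces (assumes the surrounding class keeps its System.Linq import, as the file already does):

    public List<NavPoint> GetPointsCandidate()
    {
        // Same filter as above, but sqrMagnitude avoids one sqrt per nav point.
        var origin = idNavPointDict[PointStartID].Position;
        var maxDistance = SettingsReader.Instance.GetSettings.MovementDistance;
        var maxSqr = maxDistance * maxDistance;
        return MapManager.NavPoints
            .Where(point => (origin - point.Position).sqrMagnitude < maxSqr)
            .ToList();
    }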
diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index c738d0b..d47e377 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -1,108 +1,170 @@ using System; -using UnityEngine; +using System.Collections.Generic; using Unity.MLAgents; -using Unity.MLAgents.Sensors; using Unity.MLAgents.Actuators; +using Unity.MLAgents.Sensors; +using UnityEngine; -[RequireComponent(typeof(MovementController))] +[RequireComponent(typeof(MovementController),typeof(BufferSensor))] public class NPC : Agent, ICharacter { [HideInInspector] - public Character AgentCharacter; + private Character AgentCharacter; public CharacterCondition Condition; - private FlagZone flagZone; + private FlagZone flagZone = null; - public NPC_BaseState NPC_State { get; private set; } + public INpcBaseState NpcState { get; private set; } + public INpcBaseBodyState NpcBodyState { get; private set; } public Character GetCharacter => AgentCharacter; - private NPC_DirectPointState DirectState; - private NPC_InCoverState CoverState; - private NPC_RunningState RunningState; + private NpcDirectPointState DirectState; + private NpcInCoverState CoverState; + private NpcRunningState RunningState; + + private NpcStandingState StandingState; + private NpcCrouchingState CrouchingState; private MovementController moveController; private BufferSensorComponent bufferSensor; + private Dictionary<int, NavPoint> navPointIdDict; + + #region UnityEvents and ML private void Awake() { - DirectState = new NPC_DirectPointState(); - CoverState = new NPC_InCoverState(); - RunningState = new NPC_RunningState(); - NPC_State = DirectState; + DirectState = new NpcDirectPointState(); + CoverState = new NpcInCoverState(); + RunningState = new NpcRunningState(); + NpcState = DirectState; + + CrouchingState = new NpcCrouchingState(); + StandingState = new NpcStandingState(); + NpcBodyState = StandingState; AgentCharacter = new Character(); Condition = AgentCharacter.Condition; moveController = gameObject.GetComponent<MovementController>(); bufferSensor = gameObject.GetComponent<BufferSensorComponent>(); - } - - public void ResetCharacter() + flagZone = GameObject.FindObjectOfType<FlagZone>(); + if (flagZone == null) + Debug.LogError("Flag Is Not Set"); + + navPointIdDict = MapManager.IDToNavPoint; + if (navPointIdDict is null) + Debug.LogError("Can't Find Nav Point Dictionary"); + } + + private void OnDestroy() { - Condition = new CharacterCondition(); - EndEpisode(); + Debug.LogWarning("Pooled object was destroyed"); } public override void OnEpisodeBegin() - { - NPC_State = DirectState; + { + NpcState = DirectState; flagZone = GameObject.FindObjectOfType<FlagZone>(); } public override void CollectObservations(VectorSensor sensor) { - var candidates = moveController.getPointsCandidate(); + var candidates = moveController.GetPointsCandidate(); - sensor.AddObservation(Condition.HealthPoints); - sensor.AddObservation(Condition.ArmourPoints); - sensor.AddObservation(Condition.Ammunition); - sensor.AddObservation((int)NPC_State.State); - sensor.AddObservation((!flagZone.isNotOccup).ToInt()); + //common sensors + sensor.AddObservation(GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(), + NpcBodyState.GetPointToHit(gameObject)).ToInt()); sensor.AddObservation(AgentCharacter.LastTimeHit); + sensor.AddObservation((!flagZone.IsNotOccup).ToInt()); sensor.AddObservation(Condition.GetHealthPointsInQuantile()); + sensor.AddObservation(Condition.GetArmourPointsInQuantile()); sensor.AddObservation(candidates.Count); - sensor.AddObservation(GameManager.IsEnemyNearby(gameObject.transform.position, AgentCharacter.Team)); - + sensor.AddObservation(moveController.PointStartID); + sensor.AddObservation(moveController.PointEndID); + //state sensors + sensor.AddObservation((int)NpcState.State); + sensor.AddObservation((int)NpcBodyState.State); + sensor.AddObservation(GameManager.IsEnemyNearby(gameObject.transform.position, AgentCharacter.Team)); + sensor.AddObservation(navPointIdDict[moveController.PointStartID].DeathAttr); + sensor.AddObservation(navPointIdDict[moveController.PointEndID].DeathAttr); + sensor.AddObservation(moveController.FlagDistance); + + //point sensors foreach (var point in candidates) { - Debug.Log((float)moveController.CurrentNavPoint.PointId); - bufferSensor.AppendObservation(new float[] { - //1 position in navpointId - (float)moveController.CurrentNavPoint.PointId, - //2 distance to flag - moveController.FlagDistance, - //3 death count in point - moveController.CurrentNavPoint.DeathAttr, + point.DeathAttr, + (int)point.navType, //4 flagEnemyDistance GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position).ToInt(), //5 EnemyVsNavPointDistance - GameManager.IsCloserToEnemyThanToNextNavPoint(point,transform.position, AgentCharacter.Team).ToInt() - }); - - } - } - - public override void Heuristic(in ActionBuffers actionsOut) - { - var discreteActionsOut = actionsOut.DiscreteActions; - if (Input.GetKeyDown(KeyCode.W)) - { - discreteActionsOut[0] = 1; + GameManager.IsCloserToEnemyThanToNextNavPoint(point,transform.position, AgentCharacter.Team.GetOppositeTeam()).ToInt(), + //6 Have been seen by enemy in this point + GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(), + point.Position).ToInt() + }); } } public override void OnActionReceived(ActionBuffers actions) { - if (actions.DiscreteActions[0] == 1) + var result = actions.DiscreteActions; + if (result[0] == 0) { - moveController.MoveToRandomPoint(); - NPC_State = RunningState; + if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Cover) + return; + NpcState = CoverState; + + switch (result[1]) + { + case 0: Peek(); break; + case 1: Cover(); break; + case 3: Peek(); moveController.GoToNextNavPoint(navPointIdDict[result[2]]); break; + case 4: NpcState = DirectState; break; + default: throw new ArgumentException("Undefined Action received"); + } + } + if (result[0] == 1) + { + if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Direction) + return; + switch (result[1]) + { + case 0: moveController.GoToNextNavPoint(navPointIdDict[result[2]]); + NpcState = RunningState; break; + case 1: NpcState = DirectState; break; + default: throw new ArgumentException("Undefined Action received"); + } + } + if (result[0] == 2) + { + if (moveController.PointStartID == moveController.PointEndID && moveController.PointEndID != -1) + return; + switch (result[1]) + { + case 0: moveController.StopOnPath(); NpcState = DirectState; break; + case 1: moveController.ReturnToStartPoint(); NpcState = RunningState; break; + default: throw new ArgumentException("Undefined Action received"); + } } } + #endregion - public event Action<NPC> OnKilledEvent; + public event Action<NpcBodyState> OnChangePosition; + private void Peek() + { + OnChangePosition?.Invoke(global::NpcBodyState.Standing); + NpcBodyState = StandingState; + } + + private void Cover() + { + OnChangePosition?.Invoke(global::NpcBodyState.Crouching); + NpcBodyState = CrouchingState; + } + + public event Action<int, Team> OnDamageRecieved; public void GetDamage(float damage) { AgentCharacter.LastTimeHit = TimeManager.Instance.CurrentTime; @@ -111,13 +173,17 @@ if (Condition.HealthPoints < 0) { - OnKilledEvent?.Invoke(this); - moveController.CurrentNavPoint.DeathAttr += 1; + MapManager.AddDeathAttributeToPoints(moveController.PointStartID, moveController.PointEndID, + moveController.DistanceToGo, moveController.RemainingDistance); + var pos = gameObject.transform.position; + var id = moveController.PointStartID; + CharacterFactory.Instance.ReSpawn(this, ref pos, ref id); } } - private void OnDestroy() + public void ResetCharacter() { - Debug.LogWarning("Pooled object was destroyed"); + Condition.Reset(); + EndEpisode(); } }
diff --git a/Assets/Scripts/Character/NPC_State.cs b/Assets/Scripts/Character/NPC_State.cs deleted file mode 100644 index cc2802c..0000000 --- a/Assets/Scripts/Character/NPC_State.cs +++ /dev/null @@ -1,46 +0,0 @@ -public enum NPC_EnumState -{ - InCover, - InDirectPoint, - InRunning, -} - -public interface NPC_BaseState -{ - NPC_EnumState State { get; } - bool InCover { get; } - bool IsRunning { get; } - bool InDirectPoint { get; } - float HitChance { get; } - float DoDamageChance { get; } -} - -public class NPC_DirectPointState : NPC_BaseState -{ - public bool InCover => false; - public bool IsRunning => false; - public bool InDirectPoint => false; - public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInDirectPoint; - public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInDirectPoint; - public NPC_EnumState State => NPC_EnumState.InDirectPoint; -} - -public class NPC_RunningState : NPC_BaseState -{ - public bool InCover => false; - public bool IsRunning => true; - public bool InDirectPoint => false; - public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInRunning; - public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInRunning; - public NPC_EnumState State => NPC_EnumState.InRunning; -} - -public class NPC_InCoverState : NPC_BaseState -{ - public bool InCover => true; - public bool IsRunning => false; - public bool InDirectPoint => false; - public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInCover; - public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInCover; - public NPC_EnumState State =>
NPC_EnumState.InCover; -} diff --git a/Assets/Scripts/Character/NpcState.cs b/Assets/Scripts/Character/NpcState.cs new file mode 100644 index 0000000..51feda6 --- /dev/null +++ b/Assets/Scripts/Character/NpcState.cs @@ -0,0 +1,68 @@ +using UnityEngine; + +public enum NpcEnumState +{ + InCover, + InDirectPoint, + InRunning, +} + +public enum NpcBodyState +{ + Crouching, + Standing, +} + +public class NpcCrouchingState : INpcBaseBodyState +{ + public NpcBodyState State => NpcBodyState.Crouching; + + public Vector3 GetPointToHit(GameObject go) + { + MeshRenderer meshRenderer; + go.TryGetComponent(out meshRenderer); + return meshRenderer.bounds.center; + } +} + +public class NpcStandingState : INpcBaseBodyState +{ + public NpcBodyState State => NpcBodyState.Standing; + + public Vector3 GetPointToHit(GameObject go) + { + MeshRenderer meshRenderer; + go.TryGetComponent(out meshRenderer); + return meshRenderer.bounds.center; + } +} + +public class NpcDirectPointState : INpcBaseState +{ + public bool InCover => false; + public bool IsRunning => false; + public bool InDirectPoint => false; + public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInDirectPoint; + public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInDirectPoint; + public NpcEnumState State => NpcEnumState.InDirectPoint; +} + +public class NpcRunningState : INpcBaseState +{ + public bool InCover => false; + public bool IsRunning => true; + public bool InDirectPoint => false; + public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInRunning; + public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInRunning; + public NpcEnumState State => NpcEnumState.InRunning; +} + +public class NpcInCoverState : INpcBaseState +{ + public bool InCover => true; + public bool IsRunning => false; + public bool InDirectPoint => false; + public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInCover; + public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInCover; + public NpcEnumState State => NpcEnumState.InCover; +} diff --git a/Assets/Scripts/Character/NPC_State.cs.meta b/Assets/Scripts/Character/NpcState.cs.meta similarity index 100% rename from Assets/Scripts/Character/NPC_State.cs.meta rename to Assets/Scripts/Character/NpcState.cs.meta diff --git a/Assets/Scripts/Character/Player.cs b/Assets/Scripts/Character/Player.cs index e593f51..593a326 100644 --- a/Assets/Scripts/Character/Player.cs +++ b/Assets/Scripts/Character/Player.cs @@ -15,9 +15,9 @@ public class Player : MonoBehaviour, ICharacter Condition = PlayerCharacter.Condition; } - public void ResetCharacter() + private void OnDestroy() { - Condition = new CharacterCondition(); + Debug.LogWarning("Pooled object was destroyed"); } public event Action OnKilledEvent; @@ -27,12 +27,12 @@ public class Player : MonoBehaviour, ICharacter Condition.GiveHealth(-Mathf.RoundToInt(damage * (1 - Condition.ArmourPoints * 0.5f))); Condition.GiveArmour(-Mathf.RoundToInt(Mathf.Sqrt(damage) * 5)); - if (Condition.HealthPoints < 0) - OnKilledEvent?.Invoke(this); + if (Condition.HealthPoints < 0) + OnKilledEvent?.Invoke(this); } - private void OnDestroy() + public void ResetCharacter() { - Debug.LogWarning("Pooled object was destroyed"); + Condition = new CharacterCondition(); } } diff --git a/Assets/Scripts/Character/scr_CharacterController.cs b/Assets/Scripts/Character/scr_CharacterController.cs index 2e54f22..e4f847d 100755 --- 
a/Assets/Scripts/Character/scr_CharacterController.cs +++ b/Assets/Scripts/Character/scr_CharacterController.cs @@ -1,10 +1,6 @@ -using System; -using System.Collections; -using System.Collections.Generic; -using Unity.Barracuda; -using UnityEngine; +using UnityEngine; -using static scr_Models; +using static scr_Models; public class scr_CharacterController : MonoBehaviour { @@ -14,7 +10,7 @@ public class scr_CharacterController : MonoBehaviour private Vector2 input_Movement; [HideInInspector] public Vector2 input_View; - + private Vector3 newCameraRotation; private Vector3 newCharacterRotation; @@ -22,14 +18,14 @@ public class scr_CharacterController : MonoBehaviour public Transform cameraHolder; public Transform feetTransform; - [Header("Settings")] + [Header("Settings")] public PlayerSettingsModel playerSettings; public float ViewClampYMin = -70; public float ViewClampYMax = 80; public LayerMask playerMask; - - [Header("Gravity")] + + [Header("Gravity")] public float gravityAmount; public float gravityMin; private float playerGravity; @@ -37,14 +33,14 @@ public class scr_CharacterController : MonoBehaviour public Vector3 jumpingForce; private Vector3 jumpingForceVelocity; - [Header("Stance")] + [Header("Stance")] public PlayerStance playerStance; public float playerStanceSmoothing; public CharacterStance playerStandStance; public CharacterStance playerCrouchStance; public CharacterStance playerProneStance; private float stanceCheckErrorMargin = 0.05f; - + private float cameraHeight; private float cameraHeightVelocity; @@ -61,13 +57,13 @@ public class scr_CharacterController : MonoBehaviour defaultInput.Character.Movement.performed += e => input_Movement = e.ReadValue(); defaultInput.Character.View.performed += e => input_View = e.ReadValue(); defaultInput.Character.Jump.performed += e => Jump(); - + defaultInput.Character.Crouch.performed += e => Crouch(); defaultInput.Character.Prone.performed += e => Prone(); - + defaultInput.Character.Sprint.performed += e => ToggleSprint(); defaultInput.Character.SprintReleased.performed += e => StopSprint(); - + defaultInput.Enable(); newCameraRotation = cameraHolder.localRotation.eulerAngles; @@ -80,7 +76,7 @@ public class scr_CharacterController : MonoBehaviour { currentWeapon.Initialise(this); } - + } private void Update() @@ -95,10 +91,10 @@ public class scr_CharacterController : MonoBehaviour { newCharacterRotation.y += playerSettings.ViewXSensetivity * (playerSettings.ViewXInverted ? -input_View.x : input_View.x) * Time.deltaTime; transform.localRotation = Quaternion.Euler(newCharacterRotation); - + newCameraRotation.x += playerSettings.ViewYSensetivity * (playerSettings.ViewYInverted ? 
input_View.y : -input_View.y) * Time.deltaTime; newCameraRotation.x = Mathf.Clamp(newCameraRotation.x, ViewClampYMin, ViewClampYMax); - + cameraHolder.localRotation = Quaternion.Euler(newCameraRotation); } @@ -108,7 +104,7 @@ public class scr_CharacterController : MonoBehaviour { isSprinting = false; } - + var verticalSpeed = playerSettings.WalkingForwardSpeed; var horizontalSpeed = playerSettings.WalkingStrafeSpeed; @@ -117,17 +113,17 @@ public class scr_CharacterController : MonoBehaviour verticalSpeed = playerSettings.RunningForwardSpeed; horizontalSpeed = playerSettings.RunningStrafeSpeed; } - + // Effectors if (!characterController.isGrounded) { playerSettings.SpeedEffector = playerSettings.FallingSpeedEffector; } - else if(playerStance == PlayerStance.Crouch) + else if (playerStance == PlayerStance.Crouch) { playerSettings.SpeedEffector = playerSettings.CrouchSpeedEffector; - } - else if(playerStance == PlayerStance.Prone) + } + else if (playerStance == PlayerStance.Prone) { playerSettings.SpeedEffector = playerSettings.ProneSpeedEffector; } @@ -135,15 +131,15 @@ public class scr_CharacterController : MonoBehaviour { playerSettings.SpeedEffector = 1; } - + verticalSpeed *= playerSettings.SpeedEffector; horizontalSpeed *= playerSettings.SpeedEffector; - + newMovementSpeed = Vector3.SmoothDamp(newMovementSpeed, new Vector3(horizontalSpeed * input_Movement.x * Time.deltaTime, 0, verticalSpeed * input_Movement.y * Time.deltaTime), ref newMovementSpeedVelocity, characterController.isGrounded ? playerSettings.MovementSmoothing : playerSettings.FallingSmoothing); - + var MovementSpeed = transform.TransformDirection(newMovementSpeed); if (playerGravity > gravityMin) @@ -158,7 +154,7 @@ public class scr_CharacterController : MonoBehaviour MovementSpeed.y += playerGravity; MovementSpeed += jumpingForce * Time.deltaTime; - + characterController.Move(MovementSpeed); } @@ -179,7 +175,7 @@ public class scr_CharacterController : MonoBehaviour { stanceHeight = playerProneStance.CameraHeight; } - + cameraHeight = Mathf.SmoothDamp(cameraHolder.localPosition.y, stanceHeight, ref cameraHeightVelocity, playerStanceSmoothing); cameraHolder.localPosition = new Vector3(cameraHolder.localPosition.x, cameraHeight, cameraHolder.localPosition.z); @@ -190,7 +186,7 @@ public class scr_CharacterController : MonoBehaviour { return; } - + if (playerStance == PlayerStance.Crouch) { if (StanceCheck(playerStandStance.StanceCollider.height)) @@ -200,7 +196,7 @@ public class scr_CharacterController : MonoBehaviour playerStance = PlayerStance.Stand; return; } - + // Jump jumpingForce = Vector3.up * playerSettings.JumpingHeight; playerGravity = 0; @@ -233,8 +229,8 @@ public class scr_CharacterController : MonoBehaviour { var start = new Vector3(feetTransform.position.x, feetTransform.position.y + characterController.radius + stanceCheckErrorMargin, feetTransform.position.z); var end = new Vector3(feetTransform.position.x, feetTransform.position.y - characterController.radius - stanceCheckErrorMargin + stanceCheckheight, feetTransform.position.z); - - + + return Physics.CheckCapsule(start, end, characterController.radius, playerMask); } @@ -247,7 +243,7 @@ public class scr_CharacterController : MonoBehaviour } isSprinting = !isSprinting; } - + private void StopSprint() { if (playerSettings.SprintingHold) @@ -255,5 +251,5 @@ public class scr_CharacterController : MonoBehaviour isSprinting = false; } } - + } diff --git a/Assets/Scripts/Character/scr_Models.cs b/Assets/Scripts/Character/scr_Models.cs index 3aca984..c735db5 
100755 --- a/Assets/Scripts/Character/scr_Models.cs +++ b/Assets/Scripts/Character/scr_Models.cs @@ -3,73 +3,73 @@ using UnityEngine; public static class scr_Models { - #region Player - - public enum PlayerStance - { - Stand, - Crouch, - Prone - } - - [Serializable] - public class PlayerSettingsModel - { - [Header("View Settings")] - public float ViewXSensetivity; - public float ViewYSensetivity; + #region Player - public bool ViewXInverted; - public bool ViewYInverted; + public enum PlayerStance + { + Stand, + Crouch, + Prone + } - [Header("Movement Settings")] - public bool SprintingHold; - public float MovementSmoothing; - - [Header("Movement - Running")] - public float RunningForwardSpeed; - public float RunningStrafeSpeed; - - [Header("Movement - Walking")] - public float WalkingForwardSpeed; - public float WalkingBackwardSpeed; - public float WalkingStrafeSpeed; + [Serializable] + public class PlayerSettingsModel + { + [Header("View Settings")] + public float ViewXSensetivity; + public float ViewYSensetivity; - [Header("Jumping")] - public float JumpingHeight; - public float JumpingFalloff; - public float FallingSmoothing; + public bool ViewXInverted; + public bool ViewYInverted; - [Header("Speed Effectors")] - public float SpeedEffector = 1; - public float CrouchSpeedEffector; - public float ProneSpeedEffector; - public float FallingSpeedEffector; - } + [Header("Movement Settings")] + public bool SprintingHold; + public float MovementSmoothing; - [Serializable] - public class CharacterStance - { - public float CameraHeight; - public CapsuleCollider StanceCollider; - } + [Header("Movement - Running")] + public float RunningForwardSpeed; + public float RunningStrafeSpeed; - #endregion + [Header("Movement - Walking")] + public float WalkingForwardSpeed; + public float WalkingBackwardSpeed; + public float WalkingStrafeSpeed; - #region - Weapons - + [Header("Jumping")] + public float JumpingHeight; + public float JumpingFalloff; + public float FallingSmoothing; - [Serializable] - public class WeaponSettingsModel - { - [Header("Sway")] - public float SwayAmount; - public bool SwayYInverted; - public bool SwayXInverted; - public float SwaySmoothing; - public float SwayResetSmoothing; - public float SwayClampX; - public float SwayClampY; - } + [Header("Speed Effectors")] + public float SpeedEffector = 1; + public float CrouchSpeedEffector; + public float ProneSpeedEffector; + public float FallingSpeedEffector; + } - #endregion + [Serializable] + public class CharacterStance + { + public float CameraHeight; + public CapsuleCollider StanceCollider; + } + + #endregion + + #region - Weapons - + + [Serializable] + public class WeaponSettingsModel + { + [Header("Sway")] + public float SwayAmount; + public bool SwayYInverted; + public bool SwayXInverted; + public float SwaySmoothing; + public float SwayResetSmoothing; + public float SwayClampX; + public float SwayClampY; + } + + #endregion } diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs index b91908d..7720a90 100755 --- a/Assets/Scripts/Managers/GameManager.cs +++ b/Assets/Scripts/Managers/GameManager.cs @@ -1,76 +1,123 @@ -using Unity.MLAgents; +using System; +using Unity.MLAgents; using UnityEngine; -using System; public class GameManager : MonoBehaviour { private static GameManager instance; - public static GameManager Instance { get { return instance; } } + public static GameManager Instance => instance; - private static SimpleMultiAgentGroup DefendersTeam = new SimpleMultiAgentGroup(); - private 
static SimpleMultiAgentGroup AttackersTeam = new SimpleMultiAgentGroup(); + private static SimpleMultiAgentGroup defendersTeam = new SimpleMultiAgentGroup(); + private static SimpleMultiAgentGroup attackersTeam = new SimpleMultiAgentGroup(); private void Awake() { - if (Instance == null) + if (instance is null) instance = this; - else if (Instance == this) + else + { Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } } private void Start() { Academy.Instance.OnEnvironmentReset += ResetScene; - GlobalEventManager.onCaptureFlag += flagCaptured; - GlobalEventManager.onTimeLeft += timeOut; + GlobalEventManager.onCaptureFlag += FlagCaptured; + GlobalEventManager.onTimeLeft += TimeOut; var agents = GameObject.FindObjectsOfType<Agent>(); foreach (var item in agents) { var agent = item as NPC; if (agent.GetCharacter.Team == Team.Attackers) - AttackersTeam.RegisterAgent(agent); + attackersTeam.RegisterAgent(item); else - DefendersTeam.RegisterAgent(agent); + defendersTeam.RegisterAgent(item); } } - public static bool IsCloserToEnemyThanToNextNavPoint(NavPoint navPoint, Vector3 currentTransform, Team team) + private static SimpleMultiAgentGroup getAgentList(Team team) { - SimpleMultiAgentGroup agentGroup; if (team == Team.Attackers) - agentGroup = AttackersTeam; + return attackersTeam; else - agentGroup = DefendersTeam; + return defendersTeam; + } - var distToNavPoint = (currentTransform - navPoint.position).magnitude; + public static bool IsCloserToEnemyThanToNextNavPoint(NavPoint navPoint, Vector3 currentTransform, Team oppositeTeam) + { + var agentGroup = getAgentList(oppositeTeam); + + var distToNavPoint = (currentTransform - navPoint.Position).magnitude; foreach (var agent in agentGroup.GetRegisteredAgents()) if (distToNavPoint > (currentTransform - agent.transform.position).magnitude) return true; + if ((SettingsReader.Instance.GetSettings.HasHumanAttacker == true && oppositeTeam == Team.Attackers) || + (SettingsReader.Instance.GetSettings.HasHumanDefender == true && oppositeTeam == Team.Defenders)) + { + if (distToNavPoint > (currentTransform - CharacterFactory.Instance.player.transform.position).magnitude) + return true; + } return false; } - public static bool IsEnemyNearby(Vector3 currentTransform, Team team) + public static bool IsEnemyNearby(Vector3 currentTransform, Team oppositeTeam) { - SimpleMultiAgentGroup agentGroup; - if (team == Team.Attackers) - agentGroup = AttackersTeam; - else - agentGroup = DefendersTeam; + var agentGroup = getAgentList(oppositeTeam); foreach (var agent in agentGroup.GetRegisteredAgents()) if ((currentTransform - agent.transform.position).magnitude < SettingsReader.Instance.GetSettings.ViewDistance) return true; + if ((SettingsReader.Instance.GetSettings.HasHumanAttacker == true && oppositeTeam == Team.Attackers) || + (SettingsReader.Instance.GetSettings.HasHumanDefender == true && oppositeTeam == Team.Defenders)) + { + if ((currentTransform - CharacterFactory.Instance.player.transform.position).magnitude < SettingsReader.Instance.GetSettings.ViewDistance) + return true; + } return false; } public static bool IsCloserToFlagFromNextNavPoint(NavPoint navPoint, Vector3 currentTransform) => navPoint.FlagDistance < (currentTransform - GameObject.FindGameObjectWithTag("Flag").transform.position).magnitude; - private void flagCaptured(Team team) + public static bool IsHaveSeenByEnemy(Team oppositeTeam, Vector3 position) { - switch(team) + var agentGroup = getAgentList(oppositeTeam); + RaycastHit rayHit = new RaycastHit(); + foreach (var agent in agentGroup.GetRegisteredAgents() ) + { + var npc = agent as NPC; + if (Physics.Raycast(position, + (npc.NpcBodyState.GetPointToHit(npc.gameObject) - position).normalized, + out rayHit, + SettingsReader.Instance.GetSettings.ViewDistance)) + { + if (rayHit.collider.gameObject.GetComponent<NPC>() != null) + return true; + } + } + if ((SettingsReader.Instance.GetSettings.HasHumanAttacker == true && oppositeTeam == Team.Attackers) || + (SettingsReader.Instance.GetSettings.HasHumanDefender == true && oppositeTeam == Team.Defenders)) + { + var player = CharacterFactory.Instance.player; + if (Physics.Raycast(position, + (player.GetComponent<MeshRenderer>().bounds.center - position).normalized, + out rayHit, + SettingsReader.Instance.GetSettings.ViewDistance)) + { + if (rayHit.collider.gameObject.GetComponent<Player>() != null) + return true; + } + } + return false; + } + + private void FlagCaptured(Team team) + { + switch (team) { case Team.Attackers: Debug.Log("Attackers Win"); @@ -78,21 +125,19 @@ public class GameManager : MonoBehaviour case Team.Defenders: Debug.Log("Defenders Win"); break; - default: - Debug.LogError("Unexpected Team"); - break; } + ResetScene(); } - private void timeOut() + private void TimeOut() { - Debug.Log("Time is out"); + ResetScene(); } private void OnDestroy() { - GlobalEventManager.onCaptureFlag -= flagCaptured; - GlobalEventManager.onTimeLeft -= timeOut; + GlobalEventManager.onCaptureFlag -= FlagCaptured; + GlobalEventManager.onTimeLeft -= TimeOut; } public static event Action OnResetScene;
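IsHaveSeenByEnemy raycasts from the queried position toward each enemy and counts the position as seen when the first collider hit carries a character component. Because a pickup trigger or stray collider can be the first thing the ray meets, constraining the ray with a LayerMask may be worth considering; a sketch of the per-agent check (the layer names are hypothetical, nothing in this patch defines them):

    // Restrict line-of-sight rays to characters and level geometry so that
    // pickup triggers can never intercept the ray.
    int mask = LayerMask.GetMask("Characters", "Obstacles");   // assumed layer names
    var target = npc.NpcBodyState.GetPointToHit(npc.gameObject);
    if (Physics.Raycast(position, (target - position).normalized,
                        out RaycastHit rayHit,
                        SettingsReader.Instance.GetSettings.ViewDistance, mask)
        && rayHit.collider.GetComponent<NPC>() != null)
    {
        return true;
    }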
diff --git a/Assets/Scripts/Managers/MapManager.cs b/Assets/Scripts/Managers/MapManager.cs index 8c64208..b28dcba 100755 --- a/Assets/Scripts/Managers/MapManager.cs +++ b/Assets/Scripts/Managers/MapManager.cs @@ -3,17 +3,62 @@ using UnityEngine; public class MapManager : MonoBehaviour { - public static List<NavPoint> navPoints { get; private set; } - private void Start() + private static MapManager instance; + public static MapManager Instance => instance; + private static List<NavPoint> navPoints = new List<NavPoint>(); + private static Dictionary<int, NavPoint> iDToNavPoint = new Dictionary<int, NavPoint>(); + public static List<NavPoint> NavPoints { get => navPoints; private set => navPoints = value; } + public static Dictionary<int, NavPoint> IDToNavPoint { get => iDToNavPoint; private set => iDToNavPoint = value; } + + private void Awake() { - var i = 0; - navPoints = new List<NavPoint>(); - var navPointsGameObj = GameObject.FindGameObjectsWithTag("Point"); - foreach (var gameobj in navPointsGameObj) + if (instance is null) + instance = this; + else { - var navpoint = gameobj.GetComponent<NavPoint>(); - navpoint.PointId = i; i++; - navPoints.Add(navpoint); + Destroy(gameObject); + Debug.LogError("Only 1 Instance"); } } + + private void Start() + { + var navPointSet = GameObject.Find("NavPoint Set"); + var count = navPointSet.transform.childCount; + for (int i=0; i < count; i++) + NavPoints.Add(navPointSet.transform.GetChild(i) + .gameObject.GetComponent<NavPoint>()); + + NavPointSetToID(); + } + + private void NavPointSetToID() + { + int i = 0; + foreach (var navPoint in NavPoints) + { + IDToNavPoint.Add(i, navPoint); + navPoint.PointId = i; + i++; + } + } + + public static void AddDeathAttributeToPoints(int startPoint, int endPoint, + float allDistance, float remainingDistance) + { + var startNavPoint = IDToNavPoint[startPoint]; + var endNavPoint = IDToNavPoint[endPoint]; + float coef; + try + { + coef = remainingDistance / allDistance; + } + catch (System.ArithmeticException) + { + Debug.LogError("Path Length is zero"); + return; + } + startNavPoint.DeathAttr += 1 - coef; + endNavPoint.DeathAttr += coef; + } }
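A behavioral detail in AddDeathAttributeToPoints: C# floating-point division never throws, so remainingDistance / allDistance yields Infinity or NaN when allDistance is zero, and the ArithmeticException handler can never fire. A guard-based sketch of the same method:

    public static void AddDeathAttributeToPoints(int startPoint, int endPoint,
        float allDistance, float remainingDistance)
    {
        var startNavPoint = IDToNavPoint[startPoint];
        var endNavPoint = IDToNavPoint[endPoint];
        if (allDistance <= 0f)   // float division by zero does not throw in C#
        {
            Debug.LogError("Path Length is zero");
            return;
        }
        float coef = remainingDistance / allDistance;   // fraction of the path still ahead
        startNavPoint.DeathAttr += 1 - coef;            // share of the path already covered
        endNavPoint.DeathAttr += coef;                  // remaining share, credited to the destination
    }

Note that coef is close to 1 when the agent dies right after setting off, so most of the weight lands on the destination point rather than the origin; whether that inversion is intended is worth confirming with the authors.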
diff --git a/Assets/Scripts/Managers/TimeManager.cs b/Assets/Scripts/Managers/TimeManager.cs index ea7fec4..4aa32a1 100755 --- a/Assets/Scripts/Managers/TimeManager.cs +++ b/Assets/Scripts/Managers/TimeManager.cs @@ -1,6 +1,4 @@ -using System.Collections; -using System.Collections.Generic; -using UnityEngine; +using UnityEngine; public class TimeManager : MonoBehaviour { @@ -17,12 +15,14 @@ public class TimeManager : MonoBehaviour } else { - Debug.LogError("Only one Instance"); + Debug.LogError("Only 1 Instance"); Destroy(gameObject); } } void Update() { CurrentTime += Time.deltaTime; + if (CurrentTime > SettingsReader.Instance.GetSettings.TimeOut) + GlobalEventManager.SendTimeout(); } } diff --git a/Assets/Scripts/Misc/FlagZone.cs b/Assets/Scripts/Misc/FlagZone.cs index c6328dd..c66d5bb 100755 --- a/Assets/Scripts/Misc/FlagZone.cs +++ b/Assets/Scripts/Misc/FlagZone.cs @@ -1,6 +1,4 @@ -using System.Collections; -using System.Collections.Generic; -using UnityEngine; +using UnityEngine; public class FlagZone : MonoBehaviour { @@ -9,8 +7,8 @@ public class FlagZone : MonoBehaviour public float TimeStayDefenders { get; private set; } private int occupDefenders; private int occupAttackers; - public bool isOccupBoth => (occupDefenders>0) && (occupAttackers>0); - public bool isNotOccup => (occupDefenders == 0) && (occupAttackers == 0); + public bool IsOccupBoth => (occupDefenders > 0) && (occupAttackers > 0); + public bool IsNotOccup => (occupDefenders == 0) && (occupAttackers == 0); private float timeForWin; private void Start() @@ -24,7 +22,7 @@ public class FlagZone : MonoBehaviour } private void OnTriggerEnter(Collider other) { - switch(other.tag) + switch (other.tag) { case "Defender": occupDefenders++; @@ -54,7 +52,7 @@ public class FlagZone : MonoBehaviour } private void Update() { - if (isOccupBoth || isNotOccup) + if (IsOccupBoth || IsNotOccup) { TimeStayAttackers = 0; TimeStayDefenders = 0; @@ -64,7 +62,7 @@ public class FlagZone : MonoBehaviour { TimeStayAttackers += Time.deltaTime; if (TimeStayAttackers > timeForWin) - GlobalEventManager.SendCaptureFlag(Team.Attackers); + GlobalEventManager.SendCaptureFlag(Team.Attackers); } else { diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs index 2b43e84..a70fd61 100755 --- a/Assets/Scripts/Misc/NavPoint.cs +++ b/Assets/Scripts/Misc/NavPoint.cs @@ -1,20 +1,28 @@ -using System.Collections; -using System.Collections.Generic; +using System.Collections.Generic; +using System; using UnityEngine; +public enum NavPointType +{ + Cover, + Direction, +} + + public class NavPoint : MonoBehaviour { - public Vector3 position => gameObject.transform.position; + public Vector3 Position => gameObject.transform.position; public float FlagDistance { get; private set; } + public NavPointType navType = NavPointType.Direction; + [HideInInspector] - public int? 
PointId; + public int PointId = 0; public float DeathAttr = 0; public List EnemiesSeen = new List(); - //Here other attributes; private void Start() { - FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude; + FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - Position).magnitude; } }
diff --git a/Assets/Scripts/Misc/Settings.cs b/Assets/Scripts/Misc/Settings.cs index b1b7ea8..4e333fc 100755 --- a/Assets/Scripts/Misc/Settings.cs +++ b/Assets/Scripts/Misc/Settings.cs @@ -1,6 +1,6 @@ using UnityEngine; -[CreateAssetMenu(fileName ="Game Settings", menuName = "Game/Settings", order = 51)] +[CreateAssetMenu(fileName = "Game Settings", menuName = "Game/Settings", order = 51)] public class Settings : ScriptableObject { public bool IsTesting; @@ -36,4 +36,6 @@ public class Settings : ScriptableObject public float DoDamageChanceInDirectPoint; public float DoDamageChanceInRunning; public float DoDamageChanceInCover; + + public float CrouchingCoefficient; }
diff --git a/Assets/Scripts/Misc/SettingsReader.cs b/Assets/Scripts/Misc/SettingsReader.cs index ab60b87..9e709ad 100755 --- a/Assets/Scripts/Misc/SettingsReader.cs +++ b/Assets/Scripts/Misc/SettingsReader.cs @@ -1,17 +1,21 @@ -using System.Collections; -using System.Collections.Generic; -using UnityEngine; +using UnityEngine; public class SettingsReader : MonoBehaviour { private static SettingsReader instance; - public static SettingsReader Instance { get { return instance; } } + public static SettingsReader Instance => instance; private void Awake() { - instance = this; + if (instance is null) + instance = this; + else + { + Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } } [SerializeField] private Settings gameSettings; - public Settings GetSettings { get { return gameSettings; } } + public Settings GetSettings => gameSettings; }
diff --git a/Assets/Scripts/Misc/Statistics.cs b/Assets/Scripts/Misc/Statistics.cs deleted file mode 100755 index 98be783..0000000 --- a/Assets/Scripts/Misc/Statistics.cs +++ /dev/null @@ -1,9 +0,0 @@ -using UnityEngine; - -public class Statistics : MonoBehaviour -{ - private void Start() - { - - } -}
diff --git a/Assets/Scripts/Pickups/AmmoPickUp.cs b/Assets/Scripts/Pickups/AmmoPickUp.cs index 8439c21..e7af1ce 100755 --- a/Assets/Scripts/Pickups/AmmoPickUp.cs +++ b/Assets/Scripts/Pickups/AmmoPickUp.cs @@ -1,5 +1,4 @@ -using System; -using UnityEngine; +using UnityEngine; [RequireComponent(typeof(BoxCollider))] public class AmmoPickUp : MonoBehaviour, IPickable @@ -11,6 +10,11 @@ public class AmmoPickUp : MonoBehaviour, IPickable PickObject(other.gameObject); } + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + public void PickObject(GameObject obj) { obj.GetComponent<ICharacter>()?.GetCharacter.Condition.TakeAmmo(SettingsReader.Instance.GetSettings.AmmunitionPickupAmount);
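AmmoPickUp above and the ArmourPickUp/HealthPickUp hunks below differ only in which CharacterCondition method they invoke. A shared base class could remove the duplicated trigger plumbing; a sketch (PickUpBase and Apply are hypothetical names, and the SetActive(false) call is an assumption based on the pooled-object warnings this patch adds):

    using UnityEngine;

    [RequireComponent(typeof(BoxCollider))]
    public abstract class PickUpBase : MonoBehaviour, IPickable
    {
        public abstract PickUpType type { get; }

        private void OnTriggerEnter(Collider other) => PickObject(other.gameObject);

        public void PickObject(GameObject obj)
        {
            var character = obj.GetComponent<ICharacter>();   // Player and NPC both implement it
            if (character == null)
                return;
            Apply(character.GetCharacter.Condition);
            gameObject.SetActive(false);                      // assumed: pooled, so deactivate instead of Destroy
        }

        // Each concrete pickup decides which stat it restores.
        protected abstract void Apply(CharacterCondition condition);
    }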
diff --git a/Assets/Scripts/Pickups/ArmourPickUp.cs b/Assets/Scripts/Pickups/ArmourPickUp.cs index c68e491..b5303f6 100755 --- a/Assets/Scripts/Pickups/ArmourPickUp.cs +++ b/Assets/Scripts/Pickups/ArmourPickUp.cs @@ -1,5 +1,4 @@ -using System; -using UnityEngine; +using UnityEngine; [RequireComponent(typeof(BoxCollider))] public class ArmourPickUp : MonoBehaviour, IPickable @@ -11,6 +10,11 @@ public class ArmourPickUp : MonoBehaviour, IPickable PickObject(other.gameObject); } + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + public void PickObject(GameObject obj) { obj.GetComponent<ICharacter>()?.GetCharacter.Condition.GiveArmour(SettingsReader.Instance.GetSettings.ArmourPickupAmount);
diff --git a/Assets/Scripts/Pickups/HealthPickUp.cs b/Assets/Scripts/Pickups/HealthPickUp.cs index 36f81ba..ba8d136 100755 --- a/Assets/Scripts/Pickups/HealthPickUp.cs +++ b/Assets/Scripts/Pickups/HealthPickUp.cs @@ -1,5 +1,4 @@ -using System; -using UnityEngine; +using UnityEngine; [RequireComponent(typeof(BoxCollider))] public class HealthPickUp : MonoBehaviour, IPickable @@ -11,6 +10,11 @@ public class HealthPickUp : MonoBehaviour, IPickable PickObject(other.gameObject); } + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + public void PickObject(GameObject obj) { obj.GetComponent<ICharacter>()?.GetCharacter.Condition.GiveHealth(SettingsReader.Instance.GetSettings.HealthPickupAmount);
diff --git a/Assets/Scripts/Pickups/IPickable.cs b/Assets/Scripts/Pickups/IPickable.cs index fb218d0..37cced3 100755 --- a/Assets/Scripts/Pickups/IPickable.cs +++ b/Assets/Scripts/Pickups/IPickable.cs @@ -1,7 +1,6 @@ -using System; -using UnityEngine; +using UnityEngine; public interface IPickable { - PickUpType type { get; } + PickUpType type { get; } void PickObject(GameObject obj); } \ No newline at end of file
diff --git a/Assets/Scripts/Pickups/PickUpSpawner.cs b/Assets/Scripts/Pickups/PickUpSpawner.cs index 7a29705..f9c320b 100755 --- a/Assets/Scripts/Pickups/PickUpSpawner.cs +++ b/Assets/Scripts/Pickups/PickUpSpawner.cs @@ -44,25 +44,25 @@ public class PickUpSpawner : MonoBehaviour private IEnumerator SpawnNewPickUps() { - while(true) + while (true) { GameObject item; - if(IsDisableCheck(out item)) + if (IsDisableCheck(out item)) { yield return new WaitForSeconds(3); if (item != null) { - item.transform.position = spawnPoints[Random.Range(0, spawnPoints.Count)].position; + item.transform.position = spawnPoints[Random.Range(0, spawnPoints.Count)].Position; item.SetActive(true); } } - yield return new WaitForSeconds(2); + yield return new WaitForSeconds(2); } } private bool IsDisableCheck(out GameObject gameobj) { - foreach(var pick in pickups) + foreach (var pick in pickups) { if (!pick.activeInHierarchy) {
diff --git a/Assets/Scripts/Sensors/SensorType.cs b/Assets/Scripts/Sensors/SensorType.cs deleted file mode 100755 index 8d85790..0000000 --- a/Assets/Scripts/Sensors/SensorType.cs +++ /dev/null @@ -1,6 +0,0 @@ -public enum SensorType -{ - Visual, - Sound, - Other -}
diff --git a/Assets/Scripts/Sensors/Sensors.cs b/Assets/Scripts/Sensors/Sensors.cs deleted file mode 100644 index b38d85f..0000000 --- a/Assets/Scripts/Sensors/Sensors.cs +++ /dev/null @@ -1,4 +0,0 @@ -using System.Collections.Generic; -using Unity.MLAgents.Sensors; - -
diff --git a/Assets/Scripts/Statistics.meta b/Assets/Scripts/Statistics.meta new file mode 100644 index 0000000..4e72120 --- /dev/null +++ b/Assets/Scripts/Statistics.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 3a9f7f0a9faf11f49a433480722bffc5 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant:
diff --git a/Assets/Scripts/Statistics/Logger.cs b/Assets/Scripts/Statistics/Logger.cs new file mode 100644 index 0000000..e293d25 --- /dev/null +++ b/Assets/Scripts/Statistics/Logger.cs @@ -0,0 +1,19 @@ +using System.IO; +using UnityEngine; + +public class Logger +{ + private const string directory = "/Logs/"; + private const string baseName = "Log#"; + + public static void SaveLog<T>(T objToSerialize) + { + string dir = Application.persistentDataPath + directory; + if (!Directory.Exists(dir)) + Directory.CreateDirectory(dir); + + var logName = baseName + (Directory.GetFiles(dir).Length + 1).ToString(); + string json = JsonUtility.ToJson(objToSerialize); + File.WriteAllText(dir + logName, json); + } +} \ No newline at end of file
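Logger.SaveLog serializes with JsonUtility, which only emits public fields of plain classes and, per Unity's documentation, expects the type to carry the [Serializable] attribute; the internal Log class added below may need that attribute for the output to be populated. A hedged usage sketch (MatchSummary is a hypothetical payload, not part of this patch):

    using UnityEngine;

    [System.Serializable]                 // JsonUtility wants this on plain data classes
    public class MatchSummary             // hypothetical example payload
    {
        public int rounds;
        public float durationSeconds;
    }

    public class LoggerUsageExample : MonoBehaviour
    {
        private void OnApplicationQuit()
        {
            // Writes Logs/Log#N (no extension) under Application.persistentDataPath.
            Logger.SaveLog(new MatchSummary { rounds = 3, durationSeconds = 127.5f });
        }
    }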
diff --git a/Assets/Scripts/Statistics/Logger.cs.meta b/Assets/Scripts/Statistics/Logger.cs.meta new file mode 100644 index 0000000..e455173 --- /dev/null +++ b/Assets/Scripts/Statistics/Logger.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b3a1cec894fa98b4bbe20470f1e316c4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant:
diff --git a/Assets/Scripts/Statistics/StatisticManager.cs b/Assets/Scripts/Statistics/StatisticManager.cs new file mode 100644 index 0000000..399e7fb --- /dev/null +++ b/Assets/Scripts/Statistics/StatisticManager.cs @@ -0,0 +1,51 @@ +using UnityEngine; + +internal class Log +{ + public int damageTakenByDefs = 0; + public int damageTakenByAtc = 0; + + public int AtcWin = 0; + public int DefWin = 0; + + public int TimeOuts = 0; +} + +public class StatisticManager : MonoBehaviour +{ + private Log log = new Log(); + private void Awake() + { + foreach (var npc in GameObject.FindObjectsOfType<NPC>()) + npc.OnDamageRecieved += RegisterDamage; + + GlobalEventManager.onCaptureFlag += RegisterWin; + GlobalEventManager.onTimeLeft += RegisterTimeOut; + } + + private void RegisterDamage(int damage, Team team) + { + if (team == Team.Attackers) + log.damageTakenByAtc += damage; + else + log.damageTakenByDefs += damage; + } + + private void RegisterWin(Team team) + { + if (team == Team.Attackers) + log.AtcWin += 1; + else + log.DefWin += 1; + } + + private void RegisterTimeOut() + { + log.TimeOuts += 1; + } + + private void OnApplicationQuit() + { + Logger.SaveLog(log); + } +}
diff --git a/Assets/Scripts/Misc/Statistics.cs.meta b/Assets/Scripts/Statistics/StatisticManager.cs.meta old mode 100755 new mode 100644 similarity index 91% rename from Assets/Scripts/Misc/Statistics.cs.meta rename to Assets/Scripts/Statistics/StatisticManager.cs.meta index b8be93a..3a27c34 --- a/Assets/Scripts/Misc/Statistics.cs.meta +++ b/Assets/Scripts/Statistics/StatisticManager.cs.meta @@ -4,7 +4,7 @@ MonoImporter: externalObjects: {} serializedVersion: 2 defaultReferences: [] - executionOrder: 0 + executionOrder: 300 icon: {instanceID: 0} userData: assetBundleName:
diff --git a/Assets/Scripts/Utils/SerializableDictionary.cs b/Assets/Scripts/Utils/SerializableDictionary.cs index c0877b9..8cb258e 100755 --- a/Assets/Scripts/Utils/SerializableDictionary.cs +++ b/Assets/Scripts/Utils/SerializableDictionary.cs @@ -1,10 +1,10 @@ using System; -using System.Linq; using System.Collections; using System.Collections.Generic; using System.Diagnostics; -using UnityEngine; +using System.Linq; using UnityEditor; +using UnityEngine; using UnityObject = UnityEngine.Object; [Serializable, DebuggerDisplay("Count = {Count}")]
diff --git a/Assets/Scripts/Weapons/scr_WeaponController.cs b/Assets/Scripts/Weapons/scr_WeaponController.cs index 2fc59e5..613fc61 100755 --- a/Assets/Scripts/Weapons/scr_WeaponController.cs +++ b/Assets/Scripts/Weapons/scr_WeaponController.cs @@ -1,17 +1,16 @@ -using System; -using UnityEngine; +using UnityEngine; using static scr_Models; public class scr_WeaponController : MonoBehaviour { private scr_CharacterController characterController; - [Header("Settings")] + [Header("Settings")] public
WeaponSettingsModel settings; private bool isInitialised; Vector3 newWeaponRotation; Vector3 newWeaponRotationVelocity; - + Vector3 targetWeaponRotation; Vector3 targetWeaponRotationVelocity; @@ -32,17 +31,17 @@ public class scr_WeaponController : MonoBehaviour { return; } - + targetWeaponRotation.y += settings.SwayAmount * (settings.SwayXInverted ? -characterController.input_View.x : characterController.input_View.x) * Time.deltaTime; - targetWeaponRotation.x += settings.SwayAmount * (settings.SwayYInverted ? characterController.input_View.y : -characterController.input_View.y) * Time.deltaTime; + targetWeaponRotation.x += settings.SwayAmount * (settings.SwayYInverted ? characterController.input_View.y : -characterController.input_View.y) * Time.deltaTime; //newWeaponRotation.x = Mathf.Clamp(newWeaponRotation.x, ViewClampYMin, ViewClampYMax); - + targetWeaponRotation.x = Mathf.Clamp(targetWeaponRotation.x, -settings.SwayClampX, settings.SwayClampX); targetWeaponRotation.y = Mathf.Clamp(targetWeaponRotation.y, -settings.SwayClampY, settings.SwayClampY); targetWeaponRotation = Vector3.SmoothDamp(targetWeaponRotation, Vector3.zero, ref targetWeaponRotationVelocity, settings.SwayResetSmoothing); newWeaponRotation = Vector3.SmoothDamp(newWeaponRotation, targetWeaponRotation, ref newWeaponRotationVelocity, settings.SwaySmoothing); - + transform.localRotation = Quaternion.Euler(newWeaponRotation); } } From 3420d14cdc0bc595b4e5b0eee3bb8d7b508fb4b7 Mon Sep 17 00:00:00 2001 From: Krazerleo Date: Wed, 4 May 2022 23:57:38 +0700 Subject: [PATCH 10/27] merge --- .../Timers/Greatest_map_ever_timers.json | 2 +- Assets/Prefabs/Bot.prefab | 3 +- .../Greatest_map_ever/Greatest_map_ever.unity | 29 ++++++++++- Assets/Settings/Game Settings.asset | 50 ++++++++++--------- 4 files changed, 57 insertions(+), 27 deletions(-) diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json index 9a97eb3..b96488e 100644 --- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json +++ b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json @@ -1 +1 @@ -{"count":1,"self":16.593504,"total":17.166309,"children":{"InitializeActuators":{"count":2,"self":0.0019996,"total":0.0019996,"children":null},"InitializeSensors":{"count":2,"self":0.0030004,"total":0.0030004,"children":null},"AgentSendState":{"count":612,"self":0.016998,"total":0.549809,"children":{"CollectObservations":{"count":1224,"self":0.0230042,"total":0.0230042,"children":null},"WriteActionMask":{"count":1224,"self":0.0060088,"total":0.0060088,"children":null},"RequestDecision":{"count":1224,"self":0.503798,"total":0.503798,"children":null}}},"DecideAction":{"count":612,"self":0.0089991,"total":0.0089991,"children":null},"AgentAct":{"count":612,"self":0.006997,"total":0.006997,"children":null}},"gauges":{"My Behavior.CumulativeReward":{"count":14,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650253950","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-1IWpvtxiu_rvPpHhMWpzt -hubSessionId b1d8b690-be9f-11ec-92bc-6fd1276b6775 -accessToken 
D1AF5mitRE4Vh3s6p7rpmGuqcqvTsZJKmoGtSNfyNNs009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650253967"}} \ No newline at end of file +{"count":1,"self":5.2745576,"total":5.3091238,"children":{"InitializeActuators":{"count":2,"self":0.0019998999999999998,"total":0.0019998999999999998,"children":null},"InitializeSensors":{"count":2,"self":0.0030009999999999998,"total":0.0030009999999999998,"children":null},"AgentSendState":{"count":18,"self":0.0019986,"total":0.0175794,"children":{"CollectObservations":{"count":36,"self":0.0009994,"total":0.0009994,"children":null},"WriteActionMask":{"count":36,"self":0.0005783,"total":0.0005783,"children":null},"RequestDecision":{"count":36,"self":0.0140031,"total":0.0140031,"children":null}}},"DecideAction":{"count":18,"self":0.0089876,"total":0.0089876,"children":null},"AgentAct":{"count":18,"self":0.0009983,"total":0.0009983,"children":null}},"gauges":{"My Behavior.CumulativeReward":{"count":2,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1651670021","unity_version":"2019.4.35f1","command_line_arguments":"C:\\Program Files\\unityeditorfolder\\2019.4.35f1\\Editor\\Unity.exe -projectpath F:\\SigmaRiskManagment\\real shooter Git Version -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-9THNgaHTf51SEKo5URf9r -hubSessionId bf0b1ec0-c926-11ec-9066-7725c5249b8e -accessToken pLNs79-R0y77otVjQKfPq_jck-T6ncSnk21wIQRhdZw009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1651670026"}} \ No newline at end of file diff --git a/Assets/Prefabs/Bot.prefab b/Assets/Prefabs/Bot.prefab index eedbf35..619d624 100755 --- a/Assets/Prefabs/Bot.prefab +++ b/Assets/Prefabs/Bot.prefab @@ -143,7 +143,7 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: m_BrainParameters: - VectorObservationSize: 1 + VectorObservationSize: 9 NumStackedVectorObservations: 1 m_ActionSpec: m_NumContinuousActions: 0 @@ -195,6 +195,7 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: navMeshAgent: {fileID: 8656710265340117963} + flag: {fileID: 6818223691859422291, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} --- !u!114 &1208561866453126566 MonoBehaviour: m_ObjectHideFlags: 0 diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity index 41d83a6..3bcfbee 100755 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity +++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity @@ -259,6 +259,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + navType: 1 + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &140697607 @@ -353,6 +355,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + navType: 1 + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &293522541 @@ -1315,6 +1319,8 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: + navType: 1 + PointId: 0 DeathAttr: 0 EnemiesSeen: [] --- !u!23 &1116745545 @@ -1477,7 +1483,7 @@ Mesh: m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: pb_Mesh17416 + m_Name: pb_Mesh16900 serializedVersion: 10 m_SubMeshes: - 
   - serializedVersion: 2
@@ -1763,6 +1769,8 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+  navType: 1
+  PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
 --- !u!23 &1345085343
@@ -1857,6 +1865,8 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+  navType: 1
+  PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
 --- !u!23 &1488699524
@@ -2077,6 +2087,8 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+  navType: 1
+  PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
 --- !u!23 &1663305224
@@ -2141,6 +2153,7 @@ GameObject:
   - component: {fileID: 1858987090}
   - component: {fileID: 1858987088}
   - component: {fileID: 1858987085}
+  - component: {fileID: 1858987091}
   m_Layer: 0
   m_Name: Game
   m_TagString: Untagged
@@ -2253,6 +2266,18 @@ MonoBehaviour:
     type: 3}
   PlayerPrefab: {fileID: 5245491127989480125, guid: 80f6c1c85e5daed4c96c70205ed5503d,
     type: 3}
+--- !u!114 &1858987091
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 1858987083}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: bf3fe86787bfb0c4b8751fe495148ede, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
 --- !u!1 &1871257865
 GameObject:
   m_ObjectHideFlags: 0
@@ -2506,6 +2531,8 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+  navType: 1
+  PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
 --- !u!23 &2004854095
diff --git a/Assets/Settings/Game Settings.asset b/Assets/Settings/Game Settings.asset
index f1a62c0..fc07488 100755
--- a/Assets/Settings/Game Settings.asset
+++ b/Assets/Settings/Game Settings.asset
@@ -12,27 +12,29 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: e2c47233b9062c84482336b145c6891b, type: 3}
   m_Name: Game Settings
   m_EditorClassIdentifier: 
-  isTesting: 1
-  timeToWin: 5
-  timeOut: 1600
-  movementDistance: 50
-  movementSpeed: 3
-  defTeamAI: 0
-  atcTeamAI: 0
-  numOfDefenders: 1
-  numOfAttackers: 1
-  hasHumanDefender: 1
-  hasHumanAttacker: 0
-  healthPickupAmount: 50
-  armourPickupAmount: 50
-  ammunitionPickupAmount: 120
-  pickupsAmount: 2
-  maxHealth: 0
-  maxArmour: 0
-  maxAmmo: 0
-  GetHitChanceInDirectPoint: 0
-  GetHitChanceInRunning: 0
-  GetHitChanceInCover: 0
-  DoDamageChanceInDirectPoint: 0
-  DoDamageChanceInRunning: 0
-  DoDamageChanceInCover: 0
+  IsTesting: 0
+  TimeToWin: 15
+  TimeOut: 3600
+  MovementDistance: 50
+  MovementSpeed: 5
+  DefTeamAI: 3
+  AtcTeamAI: 3
+  NumOfDefenders: 1
+  NumOfAttackers: 1
+  HasHumanDefender: 0
+  HasHumanAttacker: 0
+  HealthPickupAmount: 50
+  ArmourPickupAmount: 50
+  AmmunitionPickupAmount: 60
+  PickupsAmount: 0
+  MaxHealth: 100
+  MaxArmour: 100
+  MaxAmmo: 360
+  ViewDistance: 100
+  GetHitChanceInDirectPoint: 50
+  GetHitChanceInRunning: 25
+  GetHitChanceInCover: 20
+  DoDamageChanceInDirectPoint: 70
+  DoDamageChanceInRunning: 30
+  DoDamageChanceInCover: 25
+  CrouchingCoefficient: 1.4
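Patch 10 raises VectorObservationSize on the Bot prefab's Behavior Parameters from 1 to 9. In ML-Agents (com.unity.ml-agents 2.0.1 per the timer metadata), that configured size must stay in sync with the number of floats the agent actually writes in CollectObservations. A sketch of the accounting, with invented observation fields; the real NPC.cs members are not shown in this patch:

using Unity.MLAgents;
using Unity.MLAgents.Sensors;
using UnityEngine;

// Sketch only: these fields are assumptions for illustration. What matters
// is that the floats written below add up to the VectorObservationSize (9)
// configured on the prefab's Behavior Parameters.
public class NpcObservationSketch : Agent
{
    Vector3 position;           // 3 floats
    Vector3 flagPosition;       // 3 floats
    float health, armour, ammo; // 3 floats

    public override void CollectObservations(VectorSensor sensor)
    {
        sensor.AddObservation(position);     // +3
        sensor.AddObservation(flagPosition); // +3
        sensor.AddObservation(health);       // +1
        sensor.AddObservation(armour);       // +1
        sensor.AddObservation(ammo);         // +1 -> 9 total
    }
}

AddObservation(Vector3) contributes three floats and AddObservation(float) one, which is how a single-value observation vector grows to nine here.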
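The same patch also tightens Game Settings: the renamed TitleCase fields now carry non-zero hit/damage chances and caps (MaxHealth 100, MaxAmmo 360, ViewDistance 100), turning the previously zeroed-out testing profile into a playable configuration.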
From e689927b730e87ecd8ba02559bde925a7df5b09d Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Thu, 5 May 2022 00:55:51 +0700
Subject: [PATCH 11/27] Add StyleCop analysis cache

---
 StyleCop.Cache | 7477 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 7477 insertions(+)
 create mode 100644 StyleCop.Cache

diff --git a/StyleCop.Cache b/StyleCop.Cache
new file mode 100644
index 0000000..edcafe4
--- /dev/null
+++ b/StyleCop.Cache
@@ -0,0 +1,7477 @@
+[7,477 lines of machine-generated StyleCop results cache: for each analyzed .cs file, a block of analysis timestamps and hash values followed by violation entries (rule text, line and character offsets, warning flag). The recurring rule texts cover missing file and element documentation headers, unsorted using directives, missing 'this.' prefixes on member access, single-statement bodies without curly brackets, member ordering, and lower-case method, field, and interface names such as getPointsCandidate, scr_CharacterController, and NPC_BaseState.]
+ 167 + 5501 + 5512 + 167 + 43 + 167 + 54 + False + + + The call to jumpingForceVelocity must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 167 + 5533 + 5552 + 167 + 75 + 167 + 94 + False + + + The call to playerSettings must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 167 + 5555 + 5568 + 167 + 97 + 167 + 110 + False + + + The method must have a documentation header. + 170 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 186 + False + + + The call to playerStandStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 172 + 5673 + 5689 + 172 + 28 + 172 + 44 + False + + + The call to playerCrouchStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 176 + 5796 + 5813 + 176 + 28 + 176 + 45 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 174 + 5720 + 5731 + 174 + 13 + 174 + 24 + False + + + The call to playerProneStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 180 + 5933 + 5949 + 180 + 28 + 180 + 44 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 178 + 5858 + 5869 + 178 + 18 + 178 + 29 + False + + + The call to cameraHeight must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 183 + 5995 + 6006 + 183 + 9 + 183 + 20 + False + + + The call to cameraHolder must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 183 + 6027 + 6038 + 183 + 41 + 183 + 52 + False + + + The call to cameraHeightVelocity must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 183 + 6075 + 6094 + 183 + 89 + 183 + 108 + False + + + The call to playerStanceSmoothing must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 183 + 6097 + 6117 + 183 + 111 + 183 + 131 + False + + + The call to cameraHolder must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 185 + 6132 + 6143 + 185 + 9 + 185 + 20 + False + + + The call to cameraHolder must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 185 + 6173 + 6184 + 185 + 50 + 185 + 61 + False + + + The call to cameraHeight must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 185 + 6203 + 6214 + 185 + 80 + 185 + 91 + False + + + The call to cameraHolder must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' 
prefix to indicate the intended method call. + 185 + 6217 + 6228 + 185 + 94 + 185 + 105 + False + + + The method must have a documentation header. + 187 + False + + + Adjacent elements must be separated by a blank line. + 187 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 199 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 189 + 6301 + 6319 + 189 + 14 + 189 + 32 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 189 + 6335 + 6346 + 189 + 48 + 189 + 59 + False + + + The call to StanceCheck must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 196 + 6502 + 6512 + 196 + 17 + 196 + 27 + False + + + The call to playerStandStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 196 + 6514 + 6530 + 196 + 29 + 196 + 45 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 200 + 6624 + 6635 + 200 + 13 + 200 + 24 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 194 + 6437 + 6448 + 194 + 13 + 194 + 24 + False + + + The call to jumpingForce must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 205 + 6727 + 6738 + 205 + 9 + 205 + 20 + False + + + The call to playerSettings must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 205 + 6755 + 6768 + 205 + 37 + 205 + 50 + False + + + The call to playerGravity must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 206 + 6794 + 6806 + 206 + 9 + 206 + 21 + False + + + The method must have a documentation header. + 209 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 216 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 219 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 223 + False + + + The call to StanceCheck must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 213 + 6934 + 6944 + 213 + 17 + 213 + 27 + False + + + The call to playerStandStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 213 + 6946 + 6962 + 213 + 29 + 213 + 45 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 217 + 7056 + 7067 + 217 + 13 + 217 + 24 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' 
prefix to indicate the intended method call. + 211 + 6869 + 6880 + 211 + 13 + 211 + 24 + False + + + The call to StanceCheck must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 220 + 7136 + 7146 + 220 + 13 + 220 + 23 + False + + + The call to playerCrouchStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 220 + 7148 + 7165 + 220 + 25 + 220 + 42 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 224 + 7243 + 7254 + 224 + 9 + 224 + 20 + False + + + The method must have a documentation header. + 227 + False + + + The call to playerStance must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 229 + 7330 + 7341 + 229 + 9 + 229 + 20 + False + + + The method must have a documentation header. + 232 + False + + + The code must not contain multiple blank lines in a row. + 237 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7469 + 7481 + 234 + 33 + 234 + 45 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7495 + 7507 + 234 + 59 + 234 + 71 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7522 + 7540 + 234 + 86 + 234 + 104 + False + + + The call to stanceCheckErrorMargin must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7551 + 7572 + 234 + 115 + 234 + 136 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7575 + 7587 + 234 + 139 + 234 + 151 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7633 + 7645 + 235 + 31 + 235 + 43 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7659 + 7671 + 235 + 57 + 235 + 69 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7686 + 7704 + 235 + 84 + 235 + 102 + False + + + The call to stanceCheckErrorMargin must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7715 + 7736 + 235 + 113 + 235 + 134 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. 
+ 235 + 7759 + 7771 + 235 + 157 + 235 + 169 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 238 + 7855 + 7873 + 238 + 49 + 238 + 67 + False + + + The call to playerMask must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 238 + 7883 + 7892 + 238 + 77 + 238 + 86 + False + + + The method must have a documentation header. + 241 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 247 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 245 + 8008 + 8018 + 245 + 13 + 245 + 23 + False + + + The call to input_Movement must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 243 + 7958 + 7971 + 243 + 13 + 243 + 26 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 248 + 8070 + 8080 + 248 + 9 + 248 + 19 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 248 + 8085 + 8095 + 248 + 24 + 248 + 34 + False + + + The method must have a documentation header. + 251 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 255 + 8216 + 8226 + 255 + 13 + 255 + 23 + False + + + The call to playerSettings must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 253 + 8162 + 8175 + 253 + 13 + 253 + 26 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.031 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 4 + False + + + class names begin with an upper-case letter: scr_Models. + 4 + False + + + The enum must have a documentation header. + 8 + False + + + The enumeration sub-item must have a documentation header. + 10 + False + + + The enumeration sub-item must have a documentation header. + 11 + False + + + The enumeration sub-item must have a documentation header. + 12 + False + + + The class must have a documentation header. + 16 + False + + + The field must have a documentation header. + 19 + False + + + Fields must be declared with private access. Use properties to expose fields. + 19 + False + + + The field must have a documentation header. + 20 + False + + + Fields must be declared with private access. Use properties to expose fields. + 20 + False + + + The field must have a documentation header. + 22 + False + + + Fields must be declared with private access. Use properties to expose fields. 
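The bulk of the controller findings are a single rule: instance members must be accessed through an explicit prefix. A minimal sketch of the fix; only the member names come from the log above, while the class skeleton and method body are reconstructed for illustration:

    using UnityEngine;

    public class scr_CharacterController : MonoBehaviour
    {
        private Vector3 newCharacterRotation;
        private Vector2 input_View;

        // Before (as flagged): newCharacterRotation.y += input_View.x * Time.deltaTime;
        // After: every instance-member access carries the required 'this.' prefix.
        private void CalculateView()
        {
            this.newCharacterRotation.y += this.input_View.x * Time.deltaTime;
            this.transform.localRotation = Quaternion.Euler(this.newCharacterRotation);
        }
    }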
[Findings for scr_Models.cs:
- Missing file header (1); "class names begin with an upper-case letter: scr_Models" (4); missing documentation headers on the classes (4, 16, 51, 62), the enum (8) and its sub-items (10-12).
- "The field must have a documentation header" and "Fields must be declared with private access. Use properties to expose fields": paired on essentially every field (lines 19-47, 53-54, 65-71).]
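Nearly every field in scr_Models.cs is flagged with "Fields must be declared with private access. Use properties to expose fields." In a Unity project the usual rule-satisfying shape that keeps Inspector serialization is a [SerializeField] private backing field behind a public property; a sketch under that assumption (the class and field names are illustrative, not taken from the source):

    using UnityEngine;

    [System.Serializable]
    public class PlayerSettingsModel
    {
        // Before (flagged): public float ViewXSensitivity;
        [SerializeField]
        private float viewXSensitivity;

        /// <summary>
        /// Gets or sets the horizontal view sensitivity.
        /// </summary>
        public float ViewXSensitivity
        {
            get { return this.viewXSensitivity; }
            set { this.viewXSensitivity = value; }
        }
    }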
[Findings for GameManager.cs:
- Missing file header (1); "System using directives must be placed before all other using directives" (3); missing documentation headers on the class (5), fields and properties (7-11, 98-99) and methods (13, 21, 39, 54, 68, 71, 87, 92, 99).
- Member layout: the property must not be placed on a single line (8); properties after fields (10, 11) and after events (98); private methods after public methods (39, 54, 68); adjacent elements separated by a blank line (99).
- "Variable names and private field names must start with a lower-case letter": DefendersTeam (10), AttackersTeam (11); "method names begin with an upper-case letter": flagCaptured (71), timeOut (87).
- if/else/foreach bodies must be wrapped in curly brackets (16, 18, 33, 35, 43, 45, 49, 50, 58, 60, 63, 64); spacing around the keyword 'switch' (73).
- Prefix rule ('this.'/'GameManager.'/'MonoBehaviour.') on Destroy and gameObject (18), ResetScene (23), flagCaptured (25, 94), timeOut (26, 95).

Findings for the event-holder script: missing class and method documentation headers (3, 7, 14); "event names begin with an upper-case letter": onCaptureFlag (5), onTimeLeft (13); methods must be placed after events (13); adjacent elements (14).

Findings for the nav-point registry (evidently MapManager.cs): missing class and method headers (4, 7); "property names begin with an upper-case letter: navPoints" (6); "A line may only contain a single statement" (15).

Findings for TimeManager.cs: missing class/field/property/method headers (5, 7, 8, 10, 11, 24); single-line property (8); methods missing access modifiers (11, 24); curly-bracket blank-line rule (23); prefix rule on Destroy and gameObject (21) and CurrentTime (26).]
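GameManager.cs concentrates the naming and bracing findings. A sketch of the corrected shapes; the skeleton is reconstructed around identifiers the log names (instance, onCaptureFlag, flagCaptured), so treat the bodies as illustrative:

    using System;
    using UnityEngine;

    public class GameManager : MonoBehaviour
    {
        private static GameManager instance;

        // Was 'onCaptureFlag': event names must start with an upper-case letter.
        public static event Action OnCaptureFlag;

        private void Awake()
        {
            // Was an unbraced 'if (...) instance = this; else Destroy(gameObject);'.
            if (instance == null)
            {
                instance = this;
            }
            else
            {
                // A static call takes the class-name prefix rather than 'this.'.
                GameManager.Destroy(this.gameObject);
            }
        }

        // Was 'flagCaptured': method names must start with an upper-case letter.
        private void FlagCaptured()
        {
            OnCaptureFlag?.Invoke();
        }
    }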
[Findings for NavPoint.cs:
- Missing file and class headers (1, 5); single-line comment rules (blank line before, no blank line after, single space after '//'), all on line 14.
- "property names begin with an upper-case letter: position" (7); missing property and field headers (7, 8, 11-13); properties after fields (11-13); fields must be private (11-13); adjacent elements (8); prefix rule on FlagDistance and position (18).

Findings for the settings-data script (likely Settings.cs): missing class header (4); spacing around the symbol '=' (3); "The field must have a documentation header" plus "Fields must be declared with private access" paired on roughly two dozen fields (lines 6-38).

Findings for SettingsReader.cs: missing class/field/property/method headers (5, 7, 8, 10, 15, 16); single-line properties (8, 16); properties after fields (15); methods after properties (16); adjacent elements (8, 16); prefix rule on gameSettings (16).

Findings for a small helper script: missing class and method headers (3, 5); an opening curly bracket must not be followed by a blank line (6); a closing curly bracket must not be preceded by a blank line (8).]
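The settings scripts repeat the most common finding in the whole log, the missing XML documentation header. The shape StyleCop accepts looks like the following; the member is modeled on the flagged gameSettings access and the summary text is ours, so treat it as a sketch:

    using UnityEngine;

    /// <summary>
    /// Exposes the shared game-settings asset to other scripts.
    /// </summary>
    public class SettingsReader : MonoBehaviour
    {
        /// <summary>
        /// The settings asset assigned in the Inspector.
        /// </summary>
        [SerializeField]
        private ScriptableObject gameSettings;

        /// <summary>
        /// Gets the shared settings asset.
        /// </summary>
        public ScriptableObject GameSettings
        {
            get { return this.gameSettings; }
        }
    }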
[Findings for the pickup scripts. AmmoPickUp.cs, ArmourPickUp.cs and HealthPickUp.cs show the same pattern: missing file/class/method headers (1, 5, 9, 14); "property names begin with an upper-case letter: type" (7); prefix rule on PickObject (11). The IPickable interface is likewise missing headers (3, 5, 6) and flags the same lower-case 'type' property (5).

Findings for PickUpSpawner.cs:
- Missing file/class/field/property/method headers (1, 7, 9, 10, 12-15, 17, 19, 27, 45, 63); single-line property (10); properties after fields (12-17).
- if/else bodies must be braced (22, 24); for bodies (32, 34); foreach bodies (40, 65); spacing around the keywords 'while' (47), 'if' (50) and 'foreach' (65); multiple spaces in a row (59); curly-bracket blank-line rule (38, 58, 72).
- Prefix rule on instance (10, 21, 22), Destroy and gameObject (24), pickups, healthPrefab, armourPrefab, ammoPrefab and spawnPoints (29-39, 55), StartCoroutine and SpawnNewPickUps (42), IsDisableCheck (50).

Two enum scripts are each missing headers on the enum (1) and its sub-items (3-5); one script has trailing and multiple blank lines at end of file (4); one small class is missing class and method headers (1, 3) and has a field name starting with an underscore (3).]
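PickUpSpawner.cs collects the loop-related findings: unbraced for/foreach bodies and missing spaces after the while/if/foreach keywords. A braced sketch; the field name follows the log, the helper method is illustrative:

    using System.Collections.Generic;
    using UnityEngine;

    public class PickUpSpawner : MonoBehaviour
    {
        private readonly List<GameObject> pickups = new List<GameObject>();

        // Before (flagged): no space after the keyword, unbraced body,
        // unprefixed field access, e.g.
        //     foreach(var pickup in pickups) pickup.SetActive(false);
        private void DisableAllPickups()
        {
            foreach (var pickup in this.pickups)
            {
                pickup.SetActive(false);
            }
        }
    }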
+ 3 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 16:12:15.866 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 11 + False + + + The field must have a documentation header. + 13 + False + + + The field must have a documentation header. + 14 + False + + + The field must have a documentation header. + 15 + False + + + The field must have a documentation header. + 16 + False + + + The field must have a documentation header. + 17 + False + + + The field must have a documentation header. + 18 + False + + + The field must have a documentation header. + 19 + False + + + The field must have a documentation header. + 20 + False + + + The field must have a documentation header. + 21 + False + + + The field must have a documentation header. + 23 + False + + + The property must have a documentation header. + 26 + False + + + The property must have a documentation header. + 31 + False + + + The indexer must have a documentation header. + 36 + False + + + The indexer must have a documentation header. + 47 + False + + + The constructor must have a documentation header. + 60 + False + + + The constructor must have a documentation header. + 65 + False + + + The constructor must have a documentation header. + 70 + False + + + The constructor must have a documentation header. + 75 + False + + + The constructor must have a documentation header. + 85 + False + + + The constructor must have a documentation header. + 90 + False + + + The method must have a documentation header. + 100 + False + + + The method must have a documentation header. + 122 + False + + + The method must have a documentation header. + 127 + False + + + The method must have a documentation header. + 146 + False + + + The method must have a documentation header. + 151 + False + + + The method must have a documentation header. + 190 + False + + + The method must have a documentation header. + 195 + False + + + The method must have a documentation header. + 226 + False + + + The method must have a documentation header. + 281 + False + + + The method must have a documentation header. + 297 + False + + + The method must have a documentation header. + 314 + False + + + The class must have a documentation header. + 325 + False + + + The field must have a documentation header. + 327 + False + + + The method must have a documentation header. + 403 + False + + + The method must have a documentation header. + 420 + False + + + The method must have a documentation header. + 439 + False + + + The property must have a documentation header. + 450 + False + + + The property must have a documentation header. + 455 + False + + + The method must have a documentation header. + 460 + False + + + The method must have a documentation header. + 465 + False + + + The method must have a documentation header. + 472 + False + + + The property must have a documentation header. + 490 + False + + + The method must have a documentation header. + 495 + False + + + The method must have a documentation header. + 500 + False + + + The method must have a documentation header. + 505 + False + + + The method must have a documentation header. 
+ 510 + False + + + + + UNITY_2019_4_35;UNITY_2019_4;UNITY_2019;UNITY_5_3_OR_NEWER;UNITY_5_4_OR_NEWER;UNITY_5_5_OR_NEWER;UNITY_5_6_OR_NEWER;UNITY_2017_1_OR_NEWER;UNITY_2017_2_OR_NEWER;UNITY_2017_3_OR_NEWER;UNITY_2017_4_OR_NEWER;UNITY_2018_1_OR_NEWER;UNITY_2018_2_OR_NEWER;UNITY_2018_3_OR_NEWER;UNITY_2018_4_OR_NEWER;UNITY_2019_1_OR_NEWER;UNITY_2019_2_OR_NEWER;UNITY_2019_3_OR_NEWER;UNITY_2019_4_OR_NEWER;PLATFORM_ARCH_64;UNITY_64;UNITY_INCLUDE_TESTS;ENABLE_AR;ENABLE_AUDIO;ENABLE_CACHING;ENABLE_CLOTH;ENABLE_EVENT_QUEUE;ENABLE_MICROPHONE;ENABLE_MULTIPLE_DISPLAYS;ENABLE_PHYSICS;ENABLE_TEXTURE_STREAMING;ENABLE_UNET;ENABLE_LZMA;ENABLE_UNITYEVENTS;ENABLE_VR;ENABLE_WEBCAM;ENABLE_UNITYWEBREQUEST;ENABLE_WWW;ENABLE_CLOUD_SERVICES;ENABLE_CLOUD_SERVICES_COLLAB;ENABLE_CLOUD_SERVICES_COLLAB_SOFTLOCKS;ENABLE_CLOUD_SERVICES_ADS;ENABLE_CLOUD_SERVICES_USE_WEBREQUEST;ENABLE_CLOUD_SERVICES_CRASH_REPORTING;ENABLE_CLOUD_SERVICES_PURCHASING;ENABLE_CLOUD_SERVICES_ANALYTICS;ENABLE_CLOUD_SERVICES_UNET;ENABLE_CLOUD_SERVICES_BUILD;ENABLE_CLOUD_LICENSE;ENABLE_EDITOR_HUB_LICENSE;ENABLE_WEBSOCKET_CLIENT;ENABLE_DIRECTOR_AUDIO;ENABLE_DIRECTOR_TEXTURE;ENABLE_MANAGED_JOBS;ENABLE_MANAGED_TRANSFORM_JOBS;ENABLE_MANAGED_ANIMATION_JOBS;ENABLE_MANAGED_AUDIO_JOBS;INCLUDE_DYNAMIC_GI;ENABLE_MONO_BDWGC;ENABLE_SCRIPTING_GC_WBARRIERS;PLATFORM_SUPPORTS_MONO;RENDER_SOFTWARE_CURSOR;ENABLE_VIDEO;PLATFORM_STANDALONE;PLATFORM_STANDALONE_WIN;UNITY_STANDALONE_WIN;UNITY_STANDALONE;ENABLE_RUNTIME_GI;ENABLE_MOVIES;ENABLE_NETWORK;ENABLE_CRUNCH_TEXTURE_COMPRESSION;ENABLE_OUT_OF_PROCESS_CRASH_HANDLER;ENABLE_CLUSTER_SYNC;ENABLE_CLUSTERINPUT;GFXDEVICE_WAITFOREVENT_MESSAGEPUMP;ENABLE_WEBSOCKET_HOST;ENABLE_MONO;NET_4_6;ENABLE_PROFILER;DEBUG;TRACE;UNITY_ASSERTIONS;UNITY_EDITOR;UNITY_EDITOR_64;UNITY_EDITOR_WIN;ENABLE_UNITY_COLLECTIONS_CHECKS;ENABLE_BURST_AOT;UNITY_TEAM_LICENSE;ENABLE_CUSTOM_RENDER_TEXTURE;ENABLE_DIRECTOR;ENABLE_LOCALIZATION;ENABLE_SPRITES;ENABLE_TERRAIN;ENABLE_TILEMAP;ENABLE_TIMELINE;ENABLE_INPUT_SYSTEM;ENABLE_LEGACY_INPUT_MANAGER;CSHARP_7_OR_LATER;CSHARP_7_3_OR_NEWER + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.28 18:30:19.041 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + A syntax error has been discovered in file F:\SigmaRiskManagment\real shooter Git Version\Assets\Scripts\Character\NPC.cs on line 87. + 87 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.28 18:30:19.035 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 5 + False + + + The property must have a documentation header. + 8 + False + + + All properties must be placed after all fields. + 10 + False + + + All properties must be placed after all fields. + 11 + False + + + All properties must be placed after all fields. + 14 + False + + + The property must have a documentation header. + 9 + False + + + Adjacent elements must be separated by a blank line. + 9 + False + + + The field must have a documentation header. 
+ 10 + False + + + Adjacent elements must be separated by a blank line. + 10 + False + + + The field must have a documentation header. + 11 + False + + + The property must have a documentation header. + 12 + False + + + Adjacent elements must be separated by a blank line. + 12 + False + + + The call to occupDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 12 + 362 + 375 + 12 + 33 + 12 + 46 + False + + + The call to occupAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 12 + 384 + 397 + 12 + 55 + 12 + 68 + False + + + The spacing around the symbol '>' is invalid. + 12 + 376 + 376 + 12 + 47 + 12 + 47 + False + + + The spacing around the symbol '>' is invalid. + 12 + False + + + The spacing around the symbol '>' is invalid. + 12 + 398 + 398 + 12 + 69 + 12 + 69 + False + + + The property must have a documentation header. + 13 + False + + + Adjacent elements must be separated by a blank line. + 13 + False + + + The call to occupDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 13 + 435 + 448 + 13 + 32 + 13 + 45 + False + + + The call to occupAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 13 + 460 + 473 + 13 + 57 + 13 + 70 + False + + + The field must have a documentation header. + 14 + False + + + Adjacent elements must be separated by a blank line. + 14 + False + + + The method must have a documentation header. + 16 + False + + + An opening curly bracket must not be followed by a blank line. + 17 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 24 + False + + + The call to timeForWin must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 19 + 559 + 568 + 19 + 9 + 19 + 18 + False + + + The call to TimeStayAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 20 + 628 + 644 + 20 + 9 + 20 + 25 + False + + + The call to TimeStayDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 21 + 660 + 676 + 21 + 9 + 21 + 25 + False + + + The call to occupAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 22 + 692 + 705 + 22 + 9 + 22 + 22 + False + + + The call to occupDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 23 + 721 + 734 + 23 + 9 + 23 + 22 + False + + + The method must have a documentation header. + 25 + False + + + Adjacent elements must be separated by a blank line. + 25 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 39 + False + + + The call to occupDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 30 + 889 + 902 + 30 + 17 + 30 + 30 + False + + + The call to occupAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. 
+ 33 + 978 + 991 + 33 + 17 + 33 + 30 + False + + + The spacing around the keyword 'switch' is invalid. + 27 + 813 + 818 + 27 + 9 + 27 + 14 + False + + + The method must have a documentation header. + 40 + False + + + Adjacent elements must be separated by a blank line. + 40 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 54 + False + + + The call to occupDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 45 + 1287 + 1300 + 45 + 17 + 45 + 30 + False + + + The call to occupAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 48 + 1376 + 1389 + 48 + 17 + 48 + 30 + False + + + The method must have a documentation header. + 55 + False + + + Adjacent elements must be separated by a blank line. + 55 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 67 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 73 + False + + + The call to TimeStayAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 59 + 1641 + 1657 + 59 + 13 + 59 + 29 + False + + + The call to TimeStayDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 60 + 1677 + 1693 + 60 + 13 + 60 + 29 + False + + + The call to IsOccupBoth must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 57 + 1590 + 1600 + 57 + 13 + 57 + 23 + False + + + The call to IsNotOccup must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 57 + 1605 + 1614 + 57 + 28 + 57 + 37 + False + + + The call to TimeStayAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 65 + 1794 + 1810 + 65 + 13 + 65 + 29 + False + + + The call to TimeStayAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 66 + 1848 + 1864 + 66 + 17 + 66 + 33 + False + + + The call to timeForWin must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 66 + 1868 + 1877 + 66 + 37 + 66 + 46 + False + + + The call to occupAttackers must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 63 + 1750 + 1763 + 63 + 18 + 63 + 31 + False + + + The call to TimeStayDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 71 + 1999 + 2015 + 71 + 13 + 71 + 29 + False + + + The call to TimeStayDefenders must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 72 + 2053 + 2069 + 72 + 17 + 72 + 33 + False + + + The call to timeForWin must begin with the 'this.', 'base.', 'object.' or 'FlagZone.' or 'MonoBehaviour.' prefix to indicate the intended method call. 
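Nearly all entries in the dump above reduce to two recurring StyleCop complaints: members referenced without a 'this.' (or type-name) qualifier, and types/members lacking documentation headers. A minimal illustration of what the analyzer is asking for — hypothetical class and field names, not code from this repository:

    using System.Collections.Generic;
    using UnityEngine;

    /// <summary>Example spawner; this header satisfies the documentation-header rules.</summary>
    public class ExamplePickUpSpawner : MonoBehaviour
    {
        private List<Transform> spawnPoints = new List<Transform>();

        private void SpawnOne()
        {
            // Flagged: "The call to spawnPoints must begin with the 'this.' ... prefix".
            // var point = spawnPoints[Random.Range(0, spawnPoints.Count)];

            // Accepted: the qualifier makes the member access explicit.
            var point = this.spawnPoints[Random.Range(0, this.spawnPoints.Count)];
        }
    }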
From 690f1e253602c6fded329ceca14cc25d3b4ce70f Mon Sep 17 00:00:00 2001
From: Andrey Gumirov
Date: Thu, 5 May 2022 01:08:58 +0700
Subject: [PATCH 12/27] test

---
 1 | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 1

diff --git a/1 b/1
new file mode 100644
index 0000000..e69de29

From 351fa8ee12c499a0374be71a192a5f288611f467 Mon Sep 17 00:00:00 2001
From: Andrey Gumirov
Date: Thu, 5 May 2022 01:10:04 +0700
Subject: [PATCH 13/27] test 1

---
 1 | 1 +
 1 file changed, 1 insertion(+)

diff --git a/1 b/1
index e69de29..8b13789 100644
--- a/1
+++ b/1
@@ -0,0 +1 @@
+

From cfb7b71a8a61ae44419070002cfd73049c278d93 Mon Sep 17 00:00:00 2001
From: Andrey Gumirov
Date: Thu, 5 May 2022 15:37:51 +0700
Subject: [PATCH 14/27] Fixed merge errors

---
 .../Scripts/Character/CharacterCondition.cs | 10 -----
 .../Scripts/Character/MovementController.cs | 25 -----------
 Assets/Scripts/Character/NPC.cs             | 45 -------
 Assets/Scripts/Misc/NavPoint.cs             |  9 ----
 4 files changed, 89 deletions(-)

diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs
index 1efa341..de60740 100755
--- a/Assets/Scripts/Character/CharacterCondition.cs
+++ b/Assets/Scripts/Character/CharacterCondition.cs
@@ -9,22 +9,12 @@ public class CharacterCondition
     public event Action OnChangeAmmunitionEvent;
 
     private int health;
-<<<<<<< HEAD
     public int HealthPoints
     {
         get
         {
             return health;
         }
-=======
-
-    public int HealthPoints
-    {
-        get
-        {
-            return health;
-        }
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
         private set
         {
             health = value;

diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs
index 3ade88a..0022357 100644
--- a/Assets/Scripts/Character/MovementController.cs
+++ b/Assets/Scripts/Character/MovementController.cs
@@ -6,18 +6,11 @@ using UnityEngine.AI;
 [RequireComponent(typeof(NavMeshAgent))]
 public class MovementController : MonoBehaviour
 {
-<<<<<<< HEAD
     public int PointStartID { get; set; }
     public int PointEndID { get; private set; }
     public float FlagDistance { get; private set; }
     private const float updateFlagPositionDelay = 5;
     private const float updateReachedDestinationDelay = 5;
-=======
-    public NavPoint currentPosition { get; set; }
-    private Dictionary<int, NavPoint> navPoints = new Dictionary<int, NavPoint>();
-
-    [SerializeField] private NavMeshAgent navMeshAgent;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
 
     [SerializeField] private NavMeshAgent navMeshAgent;
     [SerializeField] private GameObject flag;
@@ -28,7 +21,6 @@ public class MovementController : MonoBehaviour
 
     private void Awake()
     {
-<<<<<<< HEAD
         navMeshAgent.speed = SettingsReader.Instance.GetSettings.MovementSpeed;
         idNavPointDict = MapManager.IDToNavPoint;
         InvokeRepeating(nameof(UpdateFlagPosition), 0, updateFlagPositionDelay);
@@ -43,12 +35,6 @@ public class MovementController : MonoBehaviour
     private void UpdateFlagPosition()
     {
         FlagDistance = (flag.transform.position - gameObject.transform.position).magnitude;
-=======
-        navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed;
-        foreach (var np in MapManager.navPoints) {
-            navPoints[np.PointId] = np;
-        }
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     }
 
     public void MoveToPointById(int id)
@@ -62,27 +48,16 @@ public class MovementController : MonoBehaviour
     }
 
     public void MoveToRandomPoint()
-<<<<<<< HEAD
     {
         Debug.Log(MapManager.NavPoints == null);
         GoToNextNavPoint(MapManager.NavPoints[Random.Range(0, MapManager.NavPoints.Count)]);
-=======
-    {
-        // Debug.Log(MapManager.navPoints == null);
-        goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]);
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     }
 
     public List<NavPoint> GetPointsCandidate()
     {
-<<<<<<< HEAD
         return MapManager.NavPoints
             .Where(point => (idNavPointDict[PointStartID].Position - point.Position).magnitude
                 < SettingsReader.Instance.GetSettings.MovementDistance)
-=======
-        return MapManager.navPoints
-            .Where(point => (currentPosition.position - point.position).magnitude < SettingsReader.Instance.GetSettings.movementDistance)
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
             .ToList();
     }

diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs
index 932379c..d47e377 100644
--- a/Assets/Scripts/Character/NPC.cs
+++ b/Assets/Scripts/Character/NPC.cs
@@ -70,14 +70,7 @@ public class NPC : Agent, ICharacter
 
     public override void CollectObservations(VectorSensor sensor)
     {
-<<<<<<< HEAD
         var candidates = moveController.GetPointsCandidate();
-=======
-        sensor.AddObservation(Condition.HealthPoints);
-        sensor.AddObservation(Condition.ArmourPoints);
-        sensor.AddObservation(Condition.Ammunition);
-        sensor.AddObservation((int) NPC_State.State);
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
 
         //common sensors
         sensor.AddObservation(GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(),
@@ -100,7 +93,6 @@ public class NPC : Agent, ICharacter
         //point sensors
         foreach (var point in candidates)
         {
-<<<<<<< HEAD
             bufferSensor.AppendObservation(new float[] {
                 point.DeathAttr,
                 (int)point.navType,
@@ -112,35 +104,6 @@ public class NPC : Agent, ICharacter
                 GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(),
                     point.Position).ToInt()
             });
-=======
-            var parray = new float[]
-            {
-                //1 position in navpointId
-                (float) moveController.currentPosition.PointId,
-                //2 distance to flag
-                moveController.currentPosition.FlagDistance,
-                //3 death count in point
-                moveController.currentPosition.DeathAttr,
-                //4 flagEnemyDistance
-                GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position) == true ? 1 : 0,
-                //5 EnemyVsNavPointDistance
-                GameManager.IsCloserToEnemyThanToNextNavPoint(point, transform.position, AgentCharacter.Team) == true
-                    ? 1
-                    : 0
-            };
-            // var _parray = string.Join(" ", parray);
-            // Debug.Log("OBS: " + _parray);
-            bufferSensor.AppendObservation(parray);
-        }
-    }
-
-    public override void Heuristic(in ActionBuffers actionsOut)
-    {
-        var discreteActionsOut = actionsOut.DiscreteActions;
-        if (Input.GetKeyDown(KeyCode.W))
-        {
-            discreteActionsOut[0] = 1;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
         }
     }
 
@@ -149,7 +112,6 @@ public class NPC : Agent, ICharacter
         var result = actions.DiscreteActions;
         if (result[0] == 0)
         {
-<<<<<<< HEAD
             if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Cover)
                 return;
             NpcState = CoverState;
@@ -185,13 +147,6 @@ public class NPC : Agent, ICharacter
             case 1: moveController.ReturnToStartPoint(); NpcState = RunningState; break;
             default: throw new ArgumentException("Undefined Action recieved");
         }
-=======
-            moveController.MoveToRandomPoint();
-            NPC_State = RunningState;
-        } else if (actions.DiscreteActions[0] == 2)
-        {
-            moveController.MoveToPointById(actions.DiscreteActions[1]);
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
         }
     }
     #endregion

diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs
index 473d7fa..a70fd61 100755
--- a/Assets/Scripts/Misc/NavPoint.cs
+++ b/Assets/Scripts/Misc/NavPoint.cs
@@ -17,21 +17,12 @@ public class NavPoint : MonoBehaviour
     public NavPointType navType = NavPointType.Direction;
 
     [HideInInspector]
-<<<<<<< HEAD
     public int PointId = 0;
-=======
-    public int PointId;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     public float DeathAttr = 0;
     public List EnemiesSeen = new List();
 
     private void Start()
     {
-<<<<<<< HEAD
         FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - Position).magnitude;
-=======
-        PointId = GetInstanceID();
-        FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     }
 }

From f3fe5ea4cd8ef270fbfc6c1be90b25b6576f5a93 Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Thu, 5 May 2022 16:26:23 +0700
Subject: [PATCH 15/27] commit 665

---
 Assets/Prefabs/Player.prefab.meta | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Assets/Prefabs/Player.prefab.meta b/Assets/Prefabs/Player.prefab.meta
index 9180d60..1fa6659 100755
--- a/Assets/Prefabs/Player.prefab.meta
+++ b/Assets/Prefabs/Player.prefab.meta
@@ -1,5 +1,5 @@
 fileFormatVersion: 2
-guid: 99bb17a23a489624baeaf337f91a4f84
+guid: 80f6c1c85e5daed4c96c70205ed5503d
 PrefabImporter:
   externalObjects: {}
   userData:
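Patches 14 through 17 all chip away at the same botched merge; patch 14 settles the code in favor of the ID-based navigation API (PointStartID, GetPointsCandidate, GoToNextNavPoint) over the NavPoint-reference API of the other branch. For orientation, a caller-side sketch of the kept API — a hypothetical method, reassembled only from calls visible in NPC.cs and MovementController.cs, so treat the shapes as approximate:

    using System.Collections.Generic;

    public static class PatrolExample
    {
        // Move toward a nearby candidate if one exists; otherwise roam.
        public static void Step(MovementController moveController)
        {
            // Candidates are NavPoints within MovementDistance of the agent's start point.
            List<NavPoint> candidates = moveController.GetPointsCandidate();
            if (candidates.Count == 0)
            {
                moveController.MoveToRandomPoint(); // falls back to any known NavPoint
            }
        }
    }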
From ba1b350c6283b0ebb3d6e48bf7de66db8f6503a6 Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Thu, 5 May 2022 16:49:21 +0700
Subject: [PATCH 16/27] resolved merge conflicts

---
 .../Scripts/Character/CharacterCondition.cs |  9 ----
 .../Scripts/Character/MovementController.cs | 26 -----------
 Assets/Scripts/Character/NPC.cs             | 45 -------
 Assets/Scripts/Misc/NavPoint.cs             |  9 ----
 4 files changed, 89 deletions(-)

diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs
index 1efa341..8d7d5c5 100755
--- a/Assets/Scripts/Character/CharacterCondition.cs
+++ b/Assets/Scripts/Character/CharacterCondition.cs
@@ -9,14 +9,6 @@ public class CharacterCondition
     public event Action OnChangeAmmunitionEvent;
 
     private int health;
-<<<<<<< HEAD
-    public int HealthPoints
-    {
-        get
-        {
-            return health;
-        }
-=======
 
     public int HealthPoints
     {
         get
         {
             return health;
         }
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
         private set
         {
             health = value;

diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs
index 3ade88a..b1849ce 100644
--- a/Assets/Scripts/Character/MovementController.cs
+++ b/Assets/Scripts/Character/MovementController.cs
@@ -6,18 +6,11 @@ using UnityEngine.AI;
 [RequireComponent(typeof(NavMeshAgent))]
 public class MovementController : MonoBehaviour
 {
-<<<<<<< HEAD
     public int PointStartID { get; set; }
     public int PointEndID { get; private set; }
     public float FlagDistance { get; private set; }
     private const float updateFlagPositionDelay = 5;
     private const float updateReachedDestinationDelay = 5;
-=======
-    public NavPoint currentPosition { get; set; }
-    private Dictionary<int, NavPoint> navPoints = new Dictionary<int, NavPoint>();
-
-    [SerializeField] private NavMeshAgent navMeshAgent;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
 
     [SerializeField] private NavMeshAgent navMeshAgent;
     [SerializeField] private GameObject flag;
@@ -28,7 +21,6 @@ public class MovementController : MonoBehaviour
 
     private void Awake()
     {
-<<<<<<< HEAD
         navMeshAgent.speed = SettingsReader.Instance.GetSettings.MovementSpeed;
         idNavPointDict = MapManager.IDToNavPoint;
         InvokeRepeating(nameof(UpdateFlagPosition), 0, updateFlagPositionDelay);
@@ -43,12 +35,6 @@ public class MovementController : MonoBehaviour
     private void UpdateFlagPosition()
     {
         FlagDistance = (flag.transform.position - gameObject.transform.position).magnitude;
-=======
-        navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed;
-        foreach (var np in MapManager.navPoints) {
-            navPoints[np.PointId] = np;
-        }
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     }
 
     public void MoveToPointById(int id)
@@ -62,27 +48,15 @@ public class MovementController : MonoBehaviour
     }
 
     public void MoveToRandomPoint()
-<<<<<<< HEAD
     {
-        Debug.Log(MapManager.NavPoints == null);
         GoToNextNavPoint(MapManager.NavPoints[Random.Range(0, MapManager.NavPoints.Count)]);
-=======
-    {
-        // Debug.Log(MapManager.navPoints == null);
-        goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]);
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     }
 
     public List<NavPoint> GetPointsCandidate()
     {
-<<<<<<< HEAD
         return MapManager.NavPoints
             .Where(point => (idNavPointDict[PointStartID].Position - point.Position).magnitude
                 < SettingsReader.Instance.GetSettings.MovementDistance)
-=======
-        return MapManager.navPoints
-            .Where(point => (currentPosition.position - point.position).magnitude < SettingsReader.Instance.GetSettings.movementDistance)
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
             .ToList();
     }

diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs
index 932379c..d47e377 100644
--- a/Assets/Scripts/Character/NPC.cs
+++ b/Assets/Scripts/Character/NPC.cs
@@ -70,14 +70,7 @@ public class NPC : Agent, ICharacter
 
     public override void CollectObservations(VectorSensor sensor)
    {
-<<<<<<< HEAD
         var candidates = moveController.GetPointsCandidate();
-=======
-        sensor.AddObservation(Condition.HealthPoints);
-        sensor.AddObservation(Condition.ArmourPoints);
-        sensor.AddObservation(Condition.Ammunition);
-        sensor.AddObservation((int) NPC_State.State);
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
 
         //common sensors
         sensor.AddObservation(GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(),
@@ -100,7 +93,6 @@ public class NPC : Agent, ICharacter
         //point sensors
         foreach (var point in candidates)
         {
-<<<<<<< HEAD
             bufferSensor.AppendObservation(new float[] {
                 point.DeathAttr,
                 (int)point.navType,
@@ -112,35 +104,6 @@ public class NPC : Agent, ICharacter
                 GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(),
                     point.Position).ToInt()
             });
-=======
-            var parray = new float[]
-            {
-                //1 position in navpointId
-                (float) moveController.currentPosition.PointId,
-                //2 distance to flag
-                moveController.currentPosition.FlagDistance,
-                //3 death count in point
-                moveController.currentPosition.DeathAttr,
-                //4 flagEnemyDistance
-                GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position) == true ? 1 : 0,
-                //5 EnemyVsNavPointDistance
-                GameManager.IsCloserToEnemyThanToNextNavPoint(point, transform.position, AgentCharacter.Team) == true
-                    ? 1
-                    : 0
-            };
-            // var _parray = string.Join(" ", parray);
-            // Debug.Log("OBS: " + _parray);
-            bufferSensor.AppendObservation(parray);
-        }
-    }
-
-    public override void Heuristic(in ActionBuffers actionsOut)
-    {
-        var discreteActionsOut = actionsOut.DiscreteActions;
-        if (Input.GetKeyDown(KeyCode.W))
-        {
-            discreteActionsOut[0] = 1;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
         }
     }
 
@@ -149,7 +112,6 @@ public class NPC : Agent, ICharacter
         var result = actions.DiscreteActions;
         if (result[0] == 0)
         {
-<<<<<<< HEAD
             if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Cover)
                 return;
             NpcState = CoverState;
@@ -185,13 +147,6 @@ public class NPC : Agent, ICharacter
             case 1: moveController.ReturnToStartPoint(); NpcState = RunningState; break;
             default: throw new ArgumentException("Undefined Action recieved");
         }
-=======
-            moveController.MoveToRandomPoint();
-            NPC_State = RunningState;
-        } else if (actions.DiscreteActions[0] == 2)
-        {
-            moveController.MoveToPointById(actions.DiscreteActions[1]);
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
         }
     }
     #endregion

diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs
index 473d7fa..a70fd61 100755
--- a/Assets/Scripts/Misc/NavPoint.cs
+++ b/Assets/Scripts/Misc/NavPoint.cs
@@ -17,21 +17,12 @@ public class NavPoint : MonoBehaviour
     public NavPointType navType = NavPointType.Direction;
 
     [HideInInspector]
-<<<<<<< HEAD
     public int PointId = 0;
-=======
-    public int PointId;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     public float DeathAttr = 0;
     public List EnemiesSeen = new List();
 
     private void Start()
     {
-<<<<<<< HEAD
         FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - Position).magnitude;
-=======
-        PointId = GetInstanceID();
-        FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude;
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
     }
 }
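Patch 16 re-applies essentially the same conflict resolution as patch 14 on Krazerleo's branch (note the different resulting blob, 8d7d5c5 versus de60740), this time keeping the blank line before the property. Reassembled from the kept context lines of both patches, the de-conflicted property presumably reads as follows (indentation approximate; the rest of the setter lies outside the hunks shown):

    public class CharacterCondition
    {
        private int health;

        public int HealthPoints
        {
            get
            {
                return health;
            }
            private set
            {
                health = value;
                // ... remainder of the setter is not visible in the diffs above
            }
        }
    }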
From 2caf658d190d294de644e3042211c96566daa261 Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Thu, 5 May 2022 18:11:57 +0700
Subject: [PATCH 17/27] "update unresolved conflicts"

---
 Assets/Scripts/Character/CharacterCondition.cs |  1 -
 Assets/Scripts/Character/MovementController.cs | 10 ----------
 Assets/Scripts/Managers/MapManager.cs          |  2 --
 3 files changed, 13 deletions(-)

diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs
index 8d7d5c5..2124ddb 100755
--- a/Assets/Scripts/Character/CharacterCondition.cs
+++ b/Assets/Scripts/Character/CharacterCondition.cs
@@ -9,7 +9,6 @@ public class CharacterCondition
     public event Action OnChangeAmmunitionEvent;
 
     private int health;
-
     public int HealthPoints
     {
         get

diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs
index b1849ce..70341bb 100644
--- a/Assets/Scripts/Character/MovementController.cs
+++ b/Assets/Scripts/Character/MovementController.cs
@@ -36,16 +36,6 @@ public class MovementController : MonoBehaviour
     {
         FlagDistance = (flag.transform.position - gameObject.transform.position).magnitude;
     }
-
-    public void MoveToPointById(int id)
-    {
-        if (!navPoints.ContainsKey(id))
-        {
-            Debug.LogWarning("PIDOR");
-            return;
-        }
-        goToNextNavPoint(navPoints[id]);
-    }
 
     public void MoveToRandomPoint()
     {

diff --git a/Assets/Scripts/Managers/MapManager.cs b/Assets/Scripts/Managers/MapManager.cs
index b28dcba..58e5c77 100755
--- a/Assets/Scripts/Managers/MapManager.cs
+++ b/Assets/Scripts/Managers/MapManager.cs
@@ -5,8 +5,6 @@ public class MapManager : MonoBehaviour
 {
     private static MapManager instance;
     public static MapManager Instance => instance;
-    private static List<NavPoint> navPoints = new List<NavPoint>();
-    private static Dictionary<int, NavPoint> iDToNavPoint = new Dictionary<int, NavPoint>();
     public static List<NavPoint> NavPoints { get => navPoints; private set => navPoints = value; }
     public static Dictionary<int, NavPoint> IDToNavPoint { get => iDToNavPoint; private set => iDToNavPoint = value; }

From b8e8e74ab448c9126b0c22b32ed7b1d25507d97d Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Thu, 5 May 2022 20:45:26 +0700
Subject: [PATCH 18/27] Fixes in Map Manager

---
 Assets/Scripts/Managers/GameManager.cs        |  8 ++++----
 Assets/Scripts/Managers/GlobalEventManager.cs | 12 ++++++------
 Assets/Scripts/Managers/MapManager.cs         |  5 +++--
 Assets/Scripts/Statistics/Logger.cs           | 14 +++++++-------
 Assets/Scripts/Statistics/StatisticManager.cs |  4 ++--
 5 files changed, 22 insertions(+), 21 deletions(-)

diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs
index b6467d8..9299c9b 100755
--- a/Assets/Scripts/Managers/GameManager.cs
+++ b/Assets/Scripts/Managers/GameManager.cs
@@ -25,8 +25,8 @@ public class GameManager : MonoBehaviour
     {
         Academy.Instance.OnEnvironmentReset += ResetScene;
 
-        GlobalEventManager.onCaptureFlag += FlagCaptured;
-        GlobalEventManager.onTimeLeft += TimeOut;
+        GlobalEventManager.OnCaptureFlag += FlagCaptured;
+        GlobalEventManager.OnTimeLeft += TimeOut;
 
         var agents = GameObject.FindObjectsOfType();
         foreach (var item in agents)
@@ -138,8 +138,8 @@ public class GameManager : MonoBehaviour
 
     private void OnDestroy()
     {
-        GlobalEventManager.onCaptureFlag -= FlagCaptured;
-        GlobalEventManager.onTimeLeft -= TimeOut;
+        GlobalEventManager.OnCaptureFlag -= FlagCaptured;
+        GlobalEventManager.OnTimeLeft -= TimeOut;
     }
 
     public static event Action OnResetScene;

diff --git a/Assets/Scripts/Managers/GlobalEventManager.cs b/Assets/Scripts/Managers/GlobalEventManager.cs
index e0461af..753b7d2 100755
--- a/Assets/Scripts/Managers/GlobalEventManager.cs
+++ b/Assets/Scripts/Managers/GlobalEventManager.cs
@@ -2,18 +2,18 @@
 
 public class GlobalEventManager
 {
-    public static event Action<Team> onCaptureFlag;
+    public static event Action<Team> OnCaptureFlag;
     public static void SendCaptureFlag(Team team)
     {
-        onCaptureFlag?.Invoke(team);
-        onCaptureFlag = null;
+        OnCaptureFlag?.Invoke(team);
+        OnCaptureFlag = null;
     }
 
-    public static event Action onTimeLeft;
+    public static event Action OnTimeLeft;
     public static void SendTimeout()
     {
-        onTimeLeft?.Invoke();
-        onTimeLeft = null;
+        OnTimeLeft?.Invoke();
+        OnTimeLeft = null;
     }
 }

diff --git a/Assets/Scripts/Managers/MapManager.cs b/Assets/Scripts/Managers/MapManager.cs
index 58e5c77..14d70af 100755
--- a/Assets/Scripts/Managers/MapManager.cs
+++ b/Assets/Scripts/Managers/MapManager.cs
@@ -5,8 +5,9 @@ public class MapManager : MonoBehaviour
 {
     private static MapManager instance;
     public static MapManager Instance => instance;
-    public static List<NavPoint> NavPoints { get => navPoints; private set => navPoints = value; }
-    public static Dictionary<int, NavPoint> IDToNavPoint { get => iDToNavPoint; private set => iDToNavPoint = value; }
+    [SerializeField] static List<NavPoint> _navPoints;
+    public static List<NavPoint> NavPoints { get => _navPoints; private set => _navPoints = value; }
+    public static Dictionary<int, NavPoint> IDToNavPoint {get; private set; }
 
     private void Awake()
     {

diff --git a/Assets/Scripts/Statistics/Logger.cs b/Assets/Scripts/Statistics/Logger.cs
index e293d25..df7ce3d 100644
--- a/Assets/Scripts/Statistics/Logger.cs
+++ b/Assets/Scripts/Statistics/Logger.cs
@@ -3,17 +3,17 @@ using UnityEngine;
 
 public class Logger
 {
-    private const string directory = "/Logs/";
-    private const string baseName = "Log#";
+    private const string Directory = "/Logs/";
+    private const string BaseName = "Log#";
 
     public static void SaveLog<T>(T objToSerialize)
     {
-        string dir = Application.persistentDataPath + directory;
-        if (!Directory.Exists(dir))
-            Directory.CreateDirectory(dir);
+        var dir = Application.persistentDataPath + Directory;
+        if (!System.IO.Directory.Exists(dir))
+            System.IO.Directory.CreateDirectory(dir);
 
-        var logName = baseName + (Directory.GetFiles(dir).Length + 1).ToString();
-        string json = JsonUtility.ToJson(objToSerialize);
+        var logName = BaseName + (System.IO.Directory.GetFiles(dir).Length + 1).ToString();
+        var json = JsonUtility.ToJson(objToSerialize);
         File.WriteAllText(dir + logName, json);
     }
 }
\ No newline at end of file

diff --git a/Assets/Scripts/Statistics/StatisticManager.cs b/Assets/Scripts/Statistics/StatisticManager.cs
index 399e7fb..a9c4883 100644
--- a/Assets/Scripts/Statistics/StatisticManager.cs
+++ b/Assets/Scripts/Statistics/StatisticManager.cs
@@ -19,8 +19,8 @@ public class StatisticManager : MonoBehaviour
         foreach (var npc in GameObject.FindObjectsOfType())
             npc.OnDamageRecieved += RegisterDamage;
 
-        GlobalEventManager.onCaptureFlag += RegisterWin;
-        GlobalEventManager.onTimeLeft += RegisterTimeOut;
+        GlobalEventManager.OnCaptureFlag += RegisterWin;
+        GlobalEventManager.OnTimeLeft += RegisterTimeOut;
     }
 
     private void RegisterDamage(int damage, Team team)
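The renames in patch 18 bring the GlobalEventManager events in line with .NET event-naming conventions (OnCaptureFlag, OnTimeLeft). Worth noting from the kept method bodies: SendCaptureFlag and SendTimeout null the event after raising it, so every handler is dropped after the first broadcast. A minimal subscriber sketch — a hypothetical consumer class, mirroring what GameManager and StatisticManager do in the diff above; the Team parameter follows SendCaptureFlag's signature:

    public class RoundListener   // hypothetical consumer, not in the repository
    {
        public void Enable()
        {
            GlobalEventManager.OnCaptureFlag += HandleCapture;
            GlobalEventManager.OnTimeLeft += HandleTimeout;
        }

        private void HandleCapture(Team team)
        {
            // Fires at most once per subscription: SendCaptureFlag resets
            // the event to null right after invoking it.
        }

        private void HandleTimeout()
        {
            // Same one-shot behaviour as HandleCapture.
        }
    }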
From 3908334f656b248dc1f2eb4fe5a7918685dd472e Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Sat, 7 May 2022 21:45:28 +0700
Subject: [PATCH 19/27] Finally fixed

---
 Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta | 7 -------
 ProjectSettings/ProjectVersion.txt                         | 2 --
 2 files changed, 9 deletions(-)
 delete mode 100755 Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta
 delete mode 100755 ProjectSettings/ProjectVersion.txt

diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta
deleted file mode 100755
index 1400775..0000000
--- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta
+++ /dev/null
@@ -1,7 +0,0 @@
-fileFormatVersion: 2
-guid: 80ef0d75029e25243857877facd14d75
-TextScriptImporter:
-  externalObjects: {}
-  userData:
-  assetBundleName:
-  assetBundleVariant:

diff --git a/ProjectSettings/ProjectVersion.txt b/ProjectSettings/ProjectVersion.txt
deleted file mode 100755
index bf8f7eb..0000000
--- a/ProjectSettings/ProjectVersion.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-m_EditorVersion: 2019.4.36f1
-m_EditorVersionWithRevision: 2019.4.36f1 (660c164b2fc5)

From 87568c4077fa09ca9106d5a544aa848f754a6d41 Mon Sep 17 00:00:00 2001
From: Krazerleo
Date: Sat, 7 May 2022 22:00:19 +0700
Subject: [PATCH 20/27] added scene

---
 .../Greatest_map_ever/Greatest_map_ever.unity | 212 +-----------------
 1 file changed, 6 insertions(+), 206 deletions(-)
 mode change 100755 => 100644 Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity

diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity
old mode 100755
new mode 100644
index 19be7a0..1b0de26
--- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity
+++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity
@@ -38,7 +38,7 @@ RenderSettings:
   m_ReflectionIntensity: 1
   m_CustomReflection: {fileID: 0}
   m_Sun: {fileID: 705507994}
-  m_IndirectSpecularColor: {r: 0.44657815, g: 0.49641192, b: 0.57481617, a: 1}
+  m_IndirectSpecularColor: {r: 0.44657898, g: 0.4964133, b: 0.5748178, a: 1}
   m_UseRadianceAmbientProbe: 0
 --- !u!157 &3
 LightmapSettings:
@@ -259,10 +259,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -358,10 +355,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -1130,7 +1124,7 @@ Transform:
   m_PrefabAsset: {fileID: 0}
   m_GameObject: {fileID: 705507993}
   m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261}
-  m_LocalPosition: {x: 81.2, y: 64.1, z: -12.4}
+  m_LocalPosition: {x: 77.5, y: 55.2, z: -5.9}
   m_LocalScale: {x: 1, y: 1, z: 1}
   m_Children: []
   m_Father: {fileID: 0}
@@ -1261,12 +1255,6 @@ Transform:
   m_Father: {fileID: 671439045}
   m_RootOrder: 1
   m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0}
---- !u!4 &868386701 stripped
-Transform:
-  m_CorrespondingSourceObject: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-    type: 3}
-  m_PrefabInstance: {fileID: 1809549197}
-  m_PrefabAsset: {fileID: 0}
 --- !u!1 &884498019
 GameObject:
   m_ObjectHideFlags: 0
@@ -1300,80 +1288,6 @@ Transform:
   m_Father: {fileID: 782729761}
   m_RootOrder: 0
   m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
---- !u!1001 &1061293905
-PrefabInstance:
-  m_ObjectHideFlags: 0
-  serializedVersion: 2
-  m_Modification:
-    m_TransformParent: {fileID: 868386701}
-    m_Modifications:
-    - target: {fileID: 2836004473841745626, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_Controller
-      value:
-      objectReference: {fileID: 9100000, guid: 95a44e56d04c7d248ba723eda9611c51, type: 2}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_RootOrder
-      value: 2
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalPosition.x
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalPosition.y
-      value: -1
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalPosition.z
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalRotation.w
-      value: 1
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalRotation.x
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalRotation.y
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalRotation.z
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalEulerAnglesHint.x
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalEulerAnglesHint.y
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_LocalEulerAnglesHint.z
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 2836004475591695743, guid: f432554f564e69242897607d34218939,
-        type: 3}
-      propertyPath: m_Name
-      value: DragonFucker
-      objectReference: {fileID: 0}
-  m_RemovedComponents: []
-  m_SourcePrefab: {fileID: 100100000, guid: f432554f564e69242897607d34218939, type: 3}
 --- !u!1 &1116745543
 GameObject:
   m_ObjectHideFlags: 0
@@ -1405,10 +1319,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -1840,7 +1751,7 @@ Transform:
   m_PrefabAsset: {fileID: 0}
   m_GameObject: {fileID: 1345085340}
   m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
-  m_LocalPosition: {x: 21.51, y: -5.782543, z: -10.46}
+  m_LocalPosition: {x: 47.446796, y: -5.782543, z: -25.400002}
   m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367}
   m_Children: []
   m_Father: {fileID: 2060099472}
@@ -1858,10 +1769,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -1957,10 +1865,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -2182,10 +2087,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -2236,105 +2138,6 @@ MeshFilter:
   m_PrefabAsset: {fileID: 0}
   m_GameObject: {fileID: 1663305221}
   m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0}
---- !u!1001 &1809549197
-PrefabInstance:
-  m_ObjectHideFlags: 0
-  serializedVersion: 2
-  m_Modification:
-    m_TransformParent: {fileID: 0}
-    m_Modifications:
-    - target: {fileID: -4942972567661207728, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_Enabled
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_RootOrder
-      value: 6
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalPosition.x
-      value: 42.23
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalPosition.y
-      value: 1
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalPosition.z
-      value: -15.91
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalRotation.w
-      value: 1
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalRotation.x
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalRotation.y
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalRotation.z
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalEulerAnglesHint.x
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalEulerAnglesHint.y
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalEulerAnglesHint.z
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763699, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_Name
-      value: Player
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763700, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: playerStance
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763700, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: playerCrouchStance.CameraHeight
-      value: 0.3
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203470625763703, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_Enabled
-      value: 1
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203471293941515, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalPosition.x
-      value: 0
-      objectReference: {fileID: 0}
-    - target: {fileID: 4528203471293941515, guid: 99bb17a23a489624baeaf337f91a4f84,
-        type: 3}
-      propertyPath: m_LocalPosition.z
-      value: -2.417
-      objectReference: {fileID: 0}
-  m_RemovedComponents: []
-  m_SourcePrefab: {fileID: 100100000, guid: 99bb17a23a489624baeaf337f91a4f84, type: 3}
 --- !u!1 &1858987083
 GameObject:
   m_ObjectHideFlags: 0
@@ -2459,9 +2262,9 @@ MonoBehaviour:
   - {fileID: 140697606}
   spawnPointsForAttackersTeam:
   - {fileID: 2004854094}
-  AIPrefab: {fileID: 2988578997639256874, guid: b016874eb34cc084aa4359f0bbec50e1,
+  AIPrefab: {fileID: 6171680323948707524, guid: b016874eb34cc084aa4359f0bbec50e1,
     type: 3}
-  PlayerPrefab: {fileID: 5245491127989480125, guid: 99bb17a23a489624baeaf337f91a4f84,
+  PlayerPrefab: {fileID: 5245491127989480125, guid: 80f6c1c85e5daed4c96c70205ed5503d,
     type: 3}
 --- !u!114 &1858987091
 MonoBehaviour:
@@ -2728,10 +2531,7 @@ MonoBehaviour:
   m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3}
   m_Name:
   m_EditorClassIdentifier:
-<<<<<<< HEAD
   navType: 1
-=======
->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467
   PointId: 0
   DeathAttr: 0
   EnemiesSeen: []
@@ -2820,7 +2620,7 @@ Transform:
   m_PrefabAsset: {fileID: 0}
   m_GameObject: {fileID: 2060099471}
   m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
-  m_LocalPosition: {x: 27.79, y: 7.782543, z: -15}
+  m_LocalPosition: {x: 19.553204, y: 7.782543, z: -15}
   m_LocalScale: {x: 1, y: 1, z: 1}
   m_Children:
   - {fileID: 1345085341}
m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} + m_LocalPosition: {x: 50.67923, y: 1.16, z: -22.57} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6171680323407988094} + m_Father: {fileID: 0} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!33 &6171680323948707551 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 &2988578997639256868 +--- !u!23 &5770895893828047079 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -86,94 +119,58 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!136 &2988578997639256875 +--- !u!136 &6171680323948707521 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 + m_Enabled: 0 + m_Radius: 0.3 m_Height: 2 m_Direction: 1 m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &5447337162552783061 -MonoBehaviour: +--- !u!136 &6171680323948707549 +CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} - m_Name: - m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 100 ---- !u!114 &7805954453358028498 -MonoBehaviour: + m_GameObject: {fileID: 6171680323948707524} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + m_Radius: 0.3 + m_Height: 1.3 + m_Direction: 1 + m_Center: {x: 0, y: -0.35, z: 0} +--- !u!136 &6171680323948707520 +CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} + m_Material: {fileID: 0} + m_IsTrigger: 0 m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 3a5c9d521e5ef4759a8246a07d52221e, type: 3} - m_Name: - m_EditorClassIdentifier: - DecisionPeriod: 1 - TakeActionsBetweenDecisions: 1 ---- !u!114 &2676446634235362783 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} - m_Name: - m_EditorClassIdentifier: - m_BrainParameters: -<<<<<<< HEAD - VectorObservationSize: 9 -======= - VectorObservationSize: 4 ->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467 - NumStackedVectorObservations: 1 - m_ActionSpec: - m_NumContinuousActions: 0 - BranchSizes: 0200000002000000 - VectorActionSize: 0200000002000000 - VectorActionDescriptions: [] - VectorActionSpaceType: 0 - hasUpgradedBrainParametersWithActionSpec: 1 - m_Model: {fileID: 0} - 
m_InferenceDevice: 0 - m_BehaviorType: 0 - m_BehaviorName: npc - TeamId: 0 - m_UseChildSensors: 1 - m_UseChildActuators: 1 - m_ObservableAttributeHandling: 0 ---- !u!195 &8656710265340117963 + m_Radius: 0.3 + m_Height: 0.8 + m_Direction: 1 + m_Center: {x: 0, y: -0.6, z: 0} +--- !u!195 &8774702625908438859 NavMeshAgent: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Enabled: 1 m_AgentTypeID: 0 - m_Radius: 0.5 + m_Radius: 0.5000001 m_Speed: 3.5 m_Acceleration: 8 avoidancePriority: 50 @@ -186,32 +183,92 @@ NavMeshAgent: m_BaseOffset: 1 m_WalkableMask: 4294967295 m_ObstacleAvoidanceType: 4 ---- !u!114 &778652956973742106 +--- !u!114 &6521536090983603910 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} m_Name: m_EditorClassIdentifier: - navMeshAgent: {fileID: 8656710265340117963} + navMeshAgent: {fileID: 8774702625908438859} flag: {fileID: 6818223691859422291, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} ---- !u!114 &1208561866453126566 +--- !u!114 &6133354754598649724 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: dd8012d5925524537b27131fef517017, type: 3} m_Name: m_EditorClassIdentifier: m_SensorName: BufferSensor - m_ObservableSize: 5 - m_MaxNumObservables: 5 + m_ObservableSize: 0 + m_MaxNumObservables: 0 +--- !u!114 &2756943273076691504 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + VectorObservationSize: 1 + NumStackedVectorObservations: 1 + m_ActionSpec: + m_NumContinuousActions: 0 + BranchSizes: 01000000 + VectorActionSize: 01000000 + VectorActionDescriptions: [] + VectorActionSpaceType: 0 + hasUpgradedBrainParametersWithActionSpec: 1 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_BehaviorType: 0 + m_BehaviorName: My Behavior + TeamId: 0 + m_UseChildSensors: 1 + m_UseChildActuators: 1 + m_ObservableAttributeHandling: 0 +--- !u!114 &6638271233700792696 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} + m_Name: + m_EditorClassIdentifier: + agentParameters: + maxStep: 0 + hasUpgradedFromAgentParameters: 1 + MaxStep: 100 +--- !u!114 &5988018701276467001 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 
d3ebcf807a37f344998fd648dfc9376d, type: 3} + m_Name: + m_EditorClassIdentifier: + navMeshAgent: {fileID: 8774702625908438859} + flag: {fileID: 6818223691859422291, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} diff --git a/Assets/Prefabs/Player.prefab b/Assets/Prefabs/Player.prefab index c798d61..31319fc 100755 --- a/Assets/Prefabs/Player.prefab +++ b/Assets/Prefabs/Player.prefab @@ -14,9 +14,12 @@ GameObject: - component: {fileID: 5245491127989480120} - component: {fileID: 5245491127989480100} - component: {fileID: 5245491127989480121} - - component: {fileID: 5245491127989480123} - - component: {fileID: 5245491127989480122} - - component: {fileID: 4890899368932544690} + - component: {fileID: 7254047075221496626} + - component: {fileID: 5182704636738128575} + - component: {fileID: 5220658550450318085} + - component: {fileID: 4264677542023120457} + - component: {fileID: 4715950961444674817} + - component: {fileID: 5653209015090846528} m_Layer: 8 m_Name: Player m_TagString: Defender @@ -129,25 +132,29 @@ CapsuleCollider: m_Height: 0.8 m_Direction: 1 m_Center: {x: 0, y: -0.6, z: 0} ---- !u!143 &5245491127989480123 -CharacterController: +--- !u!195 &7254047075221496626 +NavMeshAgent: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 m_Enabled: 1 - serializedVersion: 2 + m_AgentTypeID: 0 + m_Radius: 0.5000001 + m_Speed: 3.5 + m_Acceleration: 8 + avoidancePriority: 50 + m_AngularSpeed: 120 + m_StoppingDistance: 0 + m_AutoTraverseOffMeshLink: 1 + m_AutoBraking: 1 + m_AutoRepath: 1 m_Height: 2 - m_Radius: 0.5 - m_SlopeLimit: 45 - m_StepOffset: 0.3 - m_SkinWidth: 0.08 - m_MinMoveDistance: 0.001 - m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &5245491127989480122 + m_BaseOffset: 1 + m_WalkableMask: 4294967295 + m_ObstacleAvoidanceType: 4 +--- !u!114 &5182704636738128575 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -156,52 +163,12 @@ MonoBehaviour: m_GameObject: {fileID: 5245491127989480125} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} + m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} m_Name: m_EditorClassIdentifier: - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 5245491129196666053} - feetTransform: {fileID: 5245491129603592455} - playerSettings: - ViewXSensetivity: 20 - ViewYSensetivity: 20 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 10 - RunningStrafeSpeed: 6 - WalkingForwardSpeed: 4 - WalkingBackwardSpeed: 2 - WalkingStrafeSpeed: 3 - JumpingHeight: 6 - JumpingFalloff: 1 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 55 - gravityAmount: 0.05 - gravityMin: -3 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0.2 - playerStandStance: - CameraHeight: 0.7 - StanceCollider: {fileID: 5245491127989480120} - playerCrouchStance: - CameraHeight: 0.3 - StanceCollider: {fileID: 5245491127989480100} - playerProneStance: - CameraHeight: -0.58 - StanceCollider: {fileID: 5245491127989480121} - currentWeapon: {fileID: 8510909888689775086} ---- !u!114 &4890899368932544690 + navMeshAgent: {fileID: 0} + flag: {fileID: 0} +--- !u!114 &5220658550450318085 MonoBehaviour: m_ObjectHideFlags: 0 
  m_CorrespondingSourceObject: {fileID: 0}
@@ -210,9 +177,72 @@ MonoBehaviour:
   m_GameObject: {fileID: 5245491127989480125}
   m_Enabled: 1
   m_EditorHideFlags: 0
-  m_Script: {fileID: 11500000, guid: a8c9a8e604d395c4ab9d03d28adc4982, type: 3}
+  m_Script: {fileID: 11500000, guid: dd8012d5925524537b27131fef517017, type: 3}
   m_Name: 
   m_EditorClassIdentifier: 
+  m_SensorName: BufferSensor
+  m_ObservableSize: 0
+  m_MaxNumObservables: 0
+--- !u!114 &4264677542023120457
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5245491127989480125}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  m_BrainParameters:
+    VectorObservationSize: 1
+    NumStackedVectorObservations: 1
+    m_ActionSpec:
+      m_NumContinuousActions: 0
+      BranchSizes: 01000000
+    VectorActionSize: 01000000
+    VectorActionDescriptions: []
+    VectorActionSpaceType: 0
+    hasUpgradedBrainParametersWithActionSpec: 1
+  m_Model: {fileID: 0}
+  m_InferenceDevice: 0
+  m_BehaviorType: 0
+  m_BehaviorName: My Behavior
+  TeamId: 0
+  m_UseChildSensors: 1
+  m_UseChildActuators: 1
+  m_ObservableAttributeHandling: 0
+--- !u!114 &4715950961444674817
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5245491127989480125}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  agentParameters:
+    maxStep: 0
+  hasUpgradedFromAgentParameters: 1
+  MaxStep: 0
+--- !u!114 &5653209015090846528
+MonoBehaviour:
+  m_ObjectHideFlags: 0
+  m_CorrespondingSourceObject: {fileID: 0}
+  m_PrefabInstance: {fileID: 0}
+  m_PrefabAsset: {fileID: 0}
+  m_GameObject: {fileID: 5245491127989480125}
+  m_Enabled: 1
+  m_EditorHideFlags: 0
+  m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3}
+  m_Name: 
+  m_EditorClassIdentifier: 
+  navMeshAgent: {fileID: 7254047075221496626}
+  flag: {fileID: 0}
 --- !u!1 &5245491128202443531
 GameObject:
   m_ObjectHideFlags: 0
diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs
index 70341bb..98df2aa 100644
--- a/Assets/Scripts/Character/MovementController.cs
+++ b/Assets/Scripts/Character/MovementController.cs
@@ -9,27 +9,28 @@ public class MovementController : MonoBehaviour
     public int PointStartID { get; set; }
     public int PointEndID { get; private set; }
     public float FlagDistance { get; private set; }
-    private const float updateFlagPositionDelay = 5;
-    private const float updateReachedDestinationDelay = 5;
+    private const float UpdateFlagPositionDelay = 5;
+    private const float UpdateReachedDestinationDelay = 5;
     [SerializeField] private NavMeshAgent navMeshAgent;
     [SerializeField] private GameObject flag;
     public float DistanceToGo { get; private set; }
     public float RemainingDistance => navMeshAgent.remainingDistance;
-    private Dictionary<int, NavPoint> idNavPointDict;
+    private Dictionary<int, NavPoint> _idNavPointDict;
 
     private void Awake()
     {
         navMeshAgent.speed = SettingsReader.Instance.GetSettings.MovementSpeed;
-        idNavPointDict = MapManager.IDToNavPoint;
-        InvokeRepeating(nameof(UpdateFlagPosition), 0, updateFlagPositionDelay);
-        InvokeRepeating(nameof(ReachedDestination), 0, updateReachedDestinationDelay);
+        _idNavPointDict = MapManager.Instance.IDToNavPoint;
+        
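// NOTE (editorial comment, not in the original commit): InvokeRepeating(name, 0, d)
+        // fires the named method immediately and then every d seconds (d = 5 here for
+        // both probes); the matching CancelInvoke calls live in OnDestroy below.
+        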
InvokeRepeating(nameof(UpdateFlagPosition), 0, UpdateFlagPositionDelay);
+        InvokeRepeating(nameof(ReachedDestination), 0, UpdateReachedDestinationDelay);
     }
 
     private void OnDestroy()
     {
         CancelInvoke(nameof(UpdateFlagPosition));
+        CancelInvoke(nameof(ReachedDestination));
     }
 
     private void UpdateFlagPosition()
@@ -39,14 +40,14 @@ public class MovementController : MonoBehaviour
 
     public void MoveToRandomPoint()
     {
-        GoToNextNavPoint(MapManager.NavPoints[Random.Range(0, MapManager.NavPoints.Count)]);
+        GoToNextNavPoint(MapManager.Instance.NavPoints[Random.Range(0, MapManager.Instance.NavPoints.Count)]);
     }
 
     public List<NavPoint> GetPointsCandidate()
     {
-        return MapManager.NavPoints
+        return MapManager.Instance.NavPoints
             .Where(point =>
-                (idNavPointDict[PointStartID].Position - point.Position).magnitude < SettingsReader.Instance.GetSettings.MovementDistance)
+                (_idNavPointDict[PointStartID].Position - point.Position).magnitude < SettingsReader.Instance.GetSettings.MovementDistance)
             .ToList();
     }
 
@@ -62,7 +63,7 @@ public class MovementController : MonoBehaviour
     public void ReturnToStartPoint()
     {
         if (navMeshAgent.isStopped == true) navMeshAgent.isStopped = false;
-        navMeshAgent.SetDestination(idNavPointDict[PointStartID].Position);
+        navMeshAgent.SetDestination(_idNavPointDict[PointStartID].Position);
         PointEndID = PointStartID;
         PointStartID = -1;
     }
diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs
index d47e377..36b76ec 100644
--- a/Assets/Scripts/Character/NPC.cs
+++ b/Assets/Scripts/Character/NPC.cs
@@ -5,7 +5,7 @@ using Unity.MLAgents.Actuators;
 using Unity.MLAgents.Sensors;
 using UnityEngine;
 
-[RequireComponent(typeof(MovementController),typeof(BufferSensor))]
+[RequireComponent(typeof(MovementController),typeof(BufferSensorComponent))]
 public class NPC : Agent, ICharacter
 {
     [HideInInspector]
@@ -47,12 +47,12 @@ public class NPC : Agent, ICharacter
         moveController = gameObject.GetComponent<MovementController>();
         bufferSensor = gameObject.GetComponent<BufferSensorComponent>();
-        
+
         flagZone = GameObject.FindObjectOfType<FlagZone>();
-        if (flagZone == null)
-            Debug.LogError("Flag Is Not Setted");
-        
-        navPointIdDict = MapManager.IDToNavPoint;
+        if (flagZone is null)
+            Debug.LogError("Flag Is Not Set");
+
+        navPointIdDict = MapManager.Instance.IDToNavPoint;
         if (navPointIdDict is null)
             Debug.LogError("Cant Find Nav Point Dictionary");
     }
@@ -64,12 +64,18 @@ public class NPC : Agent, ICharacter
 
     public override void OnEpisodeBegin()
     {
+        if (navPointIdDict is null)
+            Debug.LogError("Cant Find Nav Point Dictionary");
+
         NpcState = DirectState;
         flagZone = GameObject.FindObjectOfType<FlagZone>();
     }
 
     public override void CollectObservations(VectorSensor sensor)
     {
+        navPointIdDict = MapManager.Instance.IDToNavPoint;
+        if (navPointIdDict is null)
+            Debug.LogError("Cant Find Nav Point Dictionary");
         var candidates = moveController.GetPointsCandidate();
 
         //common sensors
@@ -93,13 +99,14 @@ public class NPC : Agent, ICharacter
         //point sensors
         foreach (var point in candidates)
         {
+            var position = transform.position;
             bufferSensor.AppendObservation(new float[] {
                 point.DeathAttr,
                 (int)point.navType,
                 //4 flagEnemyDistance
-                GameManager.IsCloserToFlagFromNextNavPoint(point, transform.position).ToInt(),
+                GameManager.IsCloserToFlagFromNextNavPoint(point, position).ToInt(),
                 //5 EnemyVsNavPointDistance
-                GameManager.IsCloserToEnemyThanToNextNavPoint(point,transform.position, AgentCharacter.Team.GetOppositeTeam()).ToInt(),
+                GameManager.IsCloserToEnemyThanToNextNavPoint(point, position, AgentCharacter.Team.GetOppositeTeam()).ToInt(),
                 //6 Have been seen by enemy in this point
                 
GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(), point.Position).ToInt()
@@ -165,11 +172,12 @@ public class NPC : Agent, ICharacter
     }
 
     public event Action<int, Team> OnDamageRecieved;
-    public void GetDamage(float damage)
+    public void GetDamage(int damage)
     {
         AgentCharacter.LastTimeHit = TimeManager.Instance.CurrentTime;
         Condition.GiveHealth(-Mathf.RoundToInt(damage * (1 - Condition.ArmourPoints * 0.5f)));
         Condition.GiveArmour(-Mathf.RoundToInt(Mathf.Sqrt(damage) * 5));
+        OnDamageRecieved?.Invoke(damage, AgentCharacter.Team);
 
         if (Condition.HealthPoints < 0)
         {
diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs
index 9299c9b..431fd81 100755
--- a/Assets/Scripts/Managers/GameManager.cs
+++ b/Assets/Scripts/Managers/GameManager.cs
@@ -24,7 +24,6 @@ public class GameManager : MonoBehaviour
     private void Start()
     {
         Academy.Instance.OnEnvironmentReset += ResetScene;
-
         GlobalEventManager.OnCaptureFlag += FlagCaptured;
         GlobalEventManager.OnTimeLeft += TimeOut;
diff --git a/Assets/Scripts/Managers/MapManager.cs b/Assets/Scripts/Managers/MapManager.cs
index 14d70af..97a0c75 100755
--- a/Assets/Scripts/Managers/MapManager.cs
+++ b/Assets/Scripts/Managers/MapManager.cs
@@ -3,36 +3,36 @@ using UnityEngine;
 
 public class MapManager : MonoBehaviour
 {
-    private static MapManager instance;
-    public static MapManager Instance => instance;
-    [SerializeField] static List<NavPoint> _navPoints;
-    public static List<NavPoint> NavPoints { get => _navPoints; private set => _navPoints = value; }
-    public static Dictionary<int, NavPoint> IDToNavPoint {get; private set; }
+    private static MapManager _instance;
+    public static MapManager Instance => _instance;
+    [SerializeField] private List<NavPoint> _navPoints;
+    public List<NavPoint> NavPoints { get => _navPoints; private set => _navPoints = value; }
+    public Dictionary<int, NavPoint> IDToNavPoint {get; private set; }
 
     private void Awake()
     {
-        if (instance is null)
-            instance = this;
+        if (_instance is null)
+            _instance = this;
         else
         {
             Destroy(gameObject);
             Debug.LogError("Only 1 Instance");
         }
-    }
-
-    private void Start()
-    {
+
+        NavPoints = new List<NavPoint>();
         var navPointSet = GameObject.Find("NavPoint Set");
         var count = navPointSet.transform.childCount;
-        for (int i=0; i < count; i++)
+        for (var i=0; i < count; i++)
             NavPoints.Add(navPointSet.transform.GetChild(i)
                 .gameObject.GetComponent<NavPoint>());
-        
+
         print(NavPoints.Count);
         NavPointSetToID();
     }
+
     private void NavPointSetToID()
     {
+        IDToNavPoint = new Dictionary<int, NavPoint>();
         int i = 0;
         foreach (var navPoint in NavPoints)
         {
@@ -45,8 +45,8 @@ public class MapManager : MonoBehaviour
 
     public static void AddDeathAttributeToPoints(int startPoint, int endPoint, float allDistance, float remainingDistance)
     {
-        var startNavPoint = IDToNavPoint[startPoint];
-        var endNavPoint = IDToNavPoint[endPoint];
+        var startNavPoint = _instance.IDToNavPoint[startPoint];
+        var endNavPoint = _instance.IDToNavPoint[endPoint];
         float coef;
         try
         {
diff --git a/Assets/Scripts/Statistics/StatisticManager.cs.meta b/Assets/Scripts/Statistics/StatisticManager.cs.meta
index 3a27c34..b8be93a 100644
--- a/Assets/Scripts/Statistics/StatisticManager.cs.meta
+++ b/Assets/Scripts/Statistics/StatisticManager.cs.meta
@@ -4,7 +4,7 @@ MonoImporter:
   externalObjects: {}
   serializedVersion: 2
   defaultReferences: []
-  executionOrder: 300
+  executionOrder: 0
   icon: {instanceID: 0}
   userData: 
   assetBundleName: 
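
[Editor's note, not part of the original patch series: the observation and
action plumbing patched above is what the "MVP" notebook added later in this
series drives from Python. A minimal sketch of that driver loop, assuming
mlagents_envs 0.28.x (the Python pairing for the com.unity.ml-agents 2.0.1
package pinned in Packages/manifest.json) and the shapes PATCH 22 configures
on Bot.prefab: a BufferSensor of up to 10 entries of 5 floats each, a 14-float
vector observation whose last entry is the flag distance, and three discrete
action branches (BranchSizes is serialized as a little-endian uint32 array).

    from mlagents_envs.environment import UnityEnvironment

    env = UnityEnvironment()                 # attaches to the running Editor
    env.reset()
    behavior = list(env.behavior_specs)[0]   # e.g. "npc?team=0" (assumed name)
    spec = env.behavior_specs[behavior]

    for _ in range(10):
        decision_steps, terminal_steps = env.get_steps(behavior)
        for agent_id in decision_steps:
            # obs[0] is the (10, 5) BufferSensor block of nav-point candidates;
            # obs[1] is the vector built in NPC.CollectObservations above
            print(agent_id, decision_steps[agent_id].obs[1])
        # sample one random action per agent across the discrete branches
        env.set_actions(behavior, spec.action_spec.random_action(len(decision_steps)))
        env.step()
    env.close()

The behavior name and version pairing are assumptions from this patch series,
not verified against the project.]

diff --git a/Assets/Settings/Game Settings.asset b/Assets/Settings/Game Settings.asset
index ea0fad7..fc07488 100755
--- a/Assets/Settings/Game Settings.asset
+++ b/Assets/Settings/Game Settings.asset
@@ -12,7 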
+12,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: e2c47233b9062c84482336b145c6891b, type: 3} m_Name: Game Settings m_EditorClassIdentifier: -<<<<<<< HEAD IsTesting: 0 TimeToWin: 15 TimeOut: 3600 @@ -39,29 +38,3 @@ MonoBehaviour: DoDamageChanceInRunning: 30 DoDamageChanceInCover: 25 CrouchingCoefficient: 1.4 -======= - isTesting: 1 - timeToWin: 5 - timeOut: 1600 - movementDistance: 50 - movementSpeed: 3 - defTeamAI: 0 - atcTeamAI: 0 - numOfDefenders: 1 - numOfAttackers: 1 - hasHumanDefender: 1 - hasHumanAttacker: 0 - healthPickupAmount: 50 - armourPickupAmount: 50 - ammunitionPickupAmount: 120 - pickupsAmount: 2 - maxHealth: 100 - maxArmour: 100 - maxAmmo: 31337 - GetHitChanceInDirectPoint: 0 - GetHitChanceInRunning: 0 - GetHitChanceInCover: 0 - DoDamageChanceInDirectPoint: 0 - DoDamageChanceInRunning: 0 - DoDamageChanceInCover: 0 ->>>>>>> 351fa8ee12c499a0374be71a192a5f288611f467 From 8cce356b8e0edda609121fa376809d2e2628ed9d Mon Sep 17 00:00:00 2001 From: Andrey Gumirov Date: Sat, 7 May 2022 22:56:07 +0700 Subject: [PATCH 22/27] MVP work with ml-agents python library --- Assets/Prefabs/Bot.prefab | 27 +++++++-- Assets/Prefabs/Player.prefab.meta | 2 +- Assets/Scripts/Character/NPC.cs | 17 +++++- Assets/Settings/Game Settings.asset | 0 Packages/manifest.json | 7 +++ Packages/packages-lock.json | 79 +++++++++++++++++++++++++++ ProjectSettings/ProjectSettings.asset | 6 +- 7 files changed, 129 insertions(+), 9 deletions(-) mode change 100755 => 100644 Assets/Prefabs/Bot.prefab mode change 100755 => 100644 Assets/Prefabs/Player.prefab.meta mode change 100755 => 100644 Assets/Settings/Game Settings.asset diff --git a/Assets/Prefabs/Bot.prefab b/Assets/Prefabs/Bot.prefab old mode 100755 new mode 100644 index 5b177e0..add7a37 --- a/Assets/Prefabs/Bot.prefab +++ b/Assets/Prefabs/Bot.prefab @@ -50,6 +50,7 @@ GameObject: - component: {fileID: 2756943273076691504} - component: {fileID: 6638271233700792696} - component: {fileID: 5988018701276467001} + - component: {fileID: 1547882613481475944} m_Layer: 8 m_Name: Bot m_TagString: Defender @@ -210,8 +211,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: m_SensorName: BufferSensor - m_ObservableSize: 0 - m_MaxNumObservables: 0 + m_ObservableSize: 5 + m_MaxNumObservables: 10 --- !u!114 &2756943273076691504 MonoBehaviour: m_ObjectHideFlags: 0 @@ -225,19 +226,19 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: m_BrainParameters: - VectorObservationSize: 1 + VectorObservationSize: 14 NumStackedVectorObservations: 1 m_ActionSpec: m_NumContinuousActions: 0 - BranchSizes: 01000000 - VectorActionSize: 01000000 + BranchSizes: 010000000100000001000000 + VectorActionSize: 010000000100000001000000 VectorActionDescriptions: [] VectorActionSpaceType: 0 hasUpgradedBrainParametersWithActionSpec: 1 m_Model: {fileID: 0} m_InferenceDevice: 0 m_BehaviorType: 0 - m_BehaviorName: My Behavior + m_BehaviorName: npc TeamId: 0 m_UseChildSensors: 1 m_UseChildActuators: 1 @@ -272,3 +273,17 @@ MonoBehaviour: m_EditorClassIdentifier: navMeshAgent: {fileID: 8774702625908438859} flag: {fileID: 6818223691859422291, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} +--- !u!114 &1547882613481475944 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 3a5c9d521e5ef4759a8246a07d52221e, type: 3} + m_Name: + m_EditorClassIdentifier: + DecisionPeriod: 5 + 
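# [Editorial comment, not in the serialized asset: the DecisionRequester added
+  # here asks the Academy for a decision every DecisionPeriod (5) steps, and
+  # with TakeActionsBetweenDecisions at 0 the agent does not repeat its last
+  # action on the steps in between.]
+  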
TakeActionsBetweenDecisions: 0 diff --git a/Assets/Prefabs/Player.prefab.meta b/Assets/Prefabs/Player.prefab.meta old mode 100755 new mode 100644 index 1fa6659..43cb0ce --- a/Assets/Prefabs/Player.prefab.meta +++ b/Assets/Prefabs/Player.prefab.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 80f6c1c85e5daed4c96c70205ed5503d +guid: a7480b77908b042d8adcdd84e8c2c15e PrefabImporter: externalObjects: {} userData: diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index 36b76ec..7af2ae9 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -73,6 +73,7 @@ public class NPC : Agent, ICharacter public override void CollectObservations(VectorSensor sensor) { + // Debug.Log("Collect observations called!"); navPointIdDict = MapManager.Instance.IDToNavPoint; if (navPointIdDict is null) Debug.LogError("Cant Find Nav Point Dictionary"); @@ -88,6 +89,7 @@ public class NPC : Agent, ICharacter sensor.AddObservation(candidates.Count); sensor.AddObservation(moveController.PointStartID); sensor.AddObservation(moveController.PointEndID); + // Debug.Log("Done common!"); //state sensors sensor.AddObservation((int)NpcState.State); sensor.AddObservation((int)NpcBodyState.State); @@ -95,6 +97,7 @@ public class NPC : Agent, ICharacter sensor.AddObservation(navPointIdDict[moveController.PointStartID].DeathAttr); sensor.AddObservation(navPointIdDict[moveController.PointEndID].DeathAttr); sensor.AddObservation(moveController.FlagDistance); + // Debug.Log("Done state sensors!"); //point sensors foreach (var point in candidates) @@ -112,11 +115,14 @@ public class NPC : Agent, ICharacter point.Position).ToInt() }); } + // Debug.Log("Done collect observations!"); } public override void OnActionReceived(ActionBuffers actions) { + // Debug.Log("Actions recieved!"); var result = actions.DiscreteActions; + // Debug.Log(result[0] + " " + result[1]); if (result[0] == 0) { if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Cover) @@ -132,18 +138,26 @@ public class NPC : Agent, ICharacter default: throw new ArgumentException("Undefined Action recieved"); } } + // Debug.Log(result[0] == 1); if (result[0] == 1) { + // Debug.Log("BEFORE SOme shitty if >:("); if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Direction) + { + // Debug.Log("SOme shitty if >:("); return; + } + // Debug.Log("FUCK"); + switch (result[1]) { case 0: moveController.GoToNextNavPoint(navPointIdDict[result[2]]); - NpcState = RunningState; break; + NpcState = RunningState; Debug.Log("Go to point " + result[2]);break; case 1: NpcState = DirectState; break; default: throw new ArgumentException("Undefined Action recieved"); } } + if (result[0] == 2) { if (moveController.PointStartID == moveController.PointEndID && moveController.PointEndID != -1) @@ -155,6 +169,7 @@ public class NPC : Agent, ICharacter default: throw new ArgumentException("Undefined Action recieved"); } } + // Debug.Log("Actions processed!"); } #endregion diff --git a/Assets/Settings/Game Settings.asset b/Assets/Settings/Game Settings.asset old mode 100755 new mode 100644 diff --git a/Packages/manifest.json b/Packages/manifest.json index a5b072e..e756f82 100755 --- a/Packages/manifest.json +++ b/Packages/manifest.json @@ -1,17 +1,24 @@ { "dependencies": { + "com.unity.2d.sprite": "1.0.0", + "com.unity.2d.tilemap": "1.0.0", + "com.unity.ads": "3.7.5", + "com.unity.analytics": "3.6.12", "com.unity.collab-proxy": "1.14.12", "com.unity.ide.rider": "1.2.1", "com.unity.ide.visualstudio": "2.0.14", 
"com.unity.ide.vscode": "1.2.4", "com.unity.inputsystem": "1.3.0", "com.unity.ml-agents": "2.0.1", + "com.unity.multiplayer-hlapi": "1.0.8", "com.unity.probuilder": "4.5.2", + "com.unity.purchasing": "4.1.2", "com.unity.test-framework": "1.1.30", "com.unity.textmeshpro": "2.1.6", "com.unity.timeline": "1.2.18", "com.unity.toolchain.win-x86_64-linux-x86_64": "2.0.0", "com.unity.ugui": "1.0.0", + "com.unity.xr.legacyinputhelpers": "2.1.8", "com.unity.modules.ai": "1.0.0", "com.unity.modules.androidjni": "1.0.0", "com.unity.modules.animation": "1.0.0", diff --git a/Packages/packages-lock.json b/Packages/packages-lock.json index bb10cfc..0bac4b2 100755 --- a/Packages/packages-lock.json +++ b/Packages/packages-lock.json @@ -1,5 +1,35 @@ { "dependencies": { + "com.unity.2d.sprite": { + "version": "1.0.0", + "depth": 0, + "source": "builtin", + "dependencies": {} + }, + "com.unity.2d.tilemap": { + "version": "1.0.0", + "depth": 0, + "source": "builtin", + "dependencies": {} + }, + "com.unity.ads": { + "version": "3.7.5", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.ugui": "1.0.0" + }, + "url": "https://packages.unity.com" + }, + "com.unity.analytics": { + "version": "3.6.12", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.ugui": "1.0.0" + }, + "url": "https://packages.unity.com" + }, "com.unity.barracuda": { "version": "2.0.0", "depth": 1, @@ -86,6 +116,15 @@ }, "url": "https://packages.unity.com" }, + "com.unity.multiplayer-hlapi": { + "version": "1.0.8", + "depth": 0, + "source": "registry", + "dependencies": { + "nuget.mono-cecil": "0.1.6-preview" + }, + "url": "https://packages.unity.com" + }, "com.unity.probuilder": { "version": "4.5.2", "depth": 0, @@ -95,6 +134,29 @@ }, "url": "https://packages.unity.com" }, + "com.unity.purchasing": { + "version": "4.1.2", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.ugui": "1.0.0", + "com.unity.modules.unityanalytics": "1.0.0", + "com.unity.modules.unitywebrequest": "1.0.0", + "com.unity.modules.jsonserialize": "1.0.0", + "com.unity.modules.androidjni": "1.0.0", + "com.unity.services.core": "1.0.1" + }, + "url": "https://packages.unity.com" + }, + "com.unity.services.core": { + "version": "1.0.1", + "depth": 1, + "source": "registry", + "dependencies": { + "com.unity.modules.unitywebrequest": "1.0.0" + }, + "url": "https://packages.unity.com" + }, "com.unity.settings-manager": { "version": "1.0.3", "depth": 1, @@ -169,6 +231,23 @@ "com.unity.modules.imgui": "1.0.0" } }, + "com.unity.xr.legacyinputhelpers": { + "version": "2.1.8", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.modules.vr": "1.0.0", + "com.unity.modules.xr": "1.0.0" + }, + "url": "https://packages.unity.com" + }, + "nuget.mono-cecil": { + "version": "0.1.6-preview", + "depth": 1, + "source": "registry", + "dependencies": {}, + "url": "https://packages.unity.com" + }, "com.unity.modules.ai": { "version": "1.0.0", "depth": 0, diff --git a/ProjectSettings/ProjectSettings.asset b/ProjectSettings/ProjectSettings.asset index dbf6f6c..b6bb648 100644 --- a/ProjectSettings/ProjectSettings.asset +++ b/ProjectSettings/ProjectSettings.asset @@ -614,7 +614,11 @@ PlayerSettings: webGLLinkerTarget: 1 webGLThreadsSupport: 0 webGLWasmStreaming: 0 - scriptingDefineSymbols: {} + scriptingDefineSymbols: + 1: CROSS_PLATFORM_INPUT + 4: CROSS_PLATFORM_INPUT;MOBILE_INPUT + 7: CROSS_PLATFORM_INPUT;MOBILE_INPUT + 14: MOBILE_INPUT platformArchitecture: {} scriptingBackend: {} il2cppCompilerConfiguration: {} From 
fa6b5f602ee65d16f94119d8ed74599852c988b4 Mon Sep 17 00:00:00 2001
From: Andrey Gumirov
Date: Sat, 7 May 2022 22:58:25 +0700
Subject: [PATCH 23/27] Added test NB

---
 test-ml-agents.ipynb | 20623 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 20623 insertions(+)
 create mode 100644 test-ml-agents.ipynb

diff --git a/test-ml-agents.ipynb b/test-ml-agents.ipynb
new file mode 100644
index 0000000..1e2d31e
--- /dev/null
+++ b/test-ml-agents.ipynb
@@ -0,0 +1,20623 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "id": "8104e2db-b1a8-40a0-a238-5d9149fd74b0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from mlagents_envs.environment import UnityEnvironment\n",
+    "import mlagents_envs\n",
+    "import numpy as np"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 74,
+   "id": "6f477382-acc9-4aec-907a-7f58caf955ed",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import random"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 83,
+   "id": "b7f60f26-0a90-4ea5-b2c2-b5683bda56a6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "env = UnityEnvironment()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 84,
+   "id": "5929b410-12c3-4bd9-b984-b2c29a76c3f3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "env.reset()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 85,
+   "id": "f108ff09-9f42-4405-add3-6df941c48f8b",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Step\n",
+      "(, )\n",
+      "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+      "       [0., 1., 1., 0., 0.],\n",
+      "       [0., 1., 1., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n",
+      "        0.     ,  0.     ,  1.     ,  1.     ,  0.     ,  0.     ,\n",
+      "        0.     , 78.21462], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+      "0.0\n",
+      "... [editorial elision: the same Step / DecisionStep printout repeats for several dozen further environment steps; across them only the state-sensor integers and the flag-distance entry (78.21462, later 80.694435) change, so the near-duplicate blocks are trimmed here] ...\n",
+      "Step\n",
+      "(, )\n",
+      "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n",
+      "       [0., 1., 0., 0., 0.],\n",
+      "       [0., 1., 1., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.],\n",
+      "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.      ,  0.      ,  0.      ,  3.      ,  3.      ,  3.      ,\n",
+      "        2.      ,  1.      ,  2.      ,  1.      ,  0.      ,  0.      ,\n",
+      "        0.      
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+    "0.0\n",
+    "[... several hundred near-identical Step / DecisionStep / 0.0 output blocks trimmed: reward is 0.0 at every step, the action mask stays [False, False, False], and only a few observation components change, the last one drifting between 90.925804 and 91.30669 ...]\n",
+    "Step\n",
+    "(<DecisionSteps object>, <TerminalSteps object>)\n",
+    "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n",
+    "       0. , 0. , 2. , 1. , 0. , 0. ,\n",
+    "       0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n",
+ "[... notebook output truncated: the same 'Step' / '(, )' / DecisionStep printout repeats for several hundred environment steps, always with reward=0.0, agent_id=0, an all-False action_mask, group_id=0 and group_reward=0.0; only a few ray-cast hits in the 10x5 observation grid and the trailing timer value of the 14-element state vector (91.26389 -> 91.30669) change between steps. The '(, )' lines appear to be the stripped repr of the (DecisionSteps, TerminalSteps) tuple returned by env.get_steps(). ...]\n",
+ "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "[... notebook output truncated: dozens of further, near-identical Step / DecisionStep printouts follow. Each repeats the same pattern (a 10x5 float32 observation matrix, a 14-element observation vector, reward=0.0, agent_id=0, all-False action masks, group_id=0, group_reward=0.0); only a few small integer entries of the observation vector vary between steps, and its last element drops from 91.30669 to 91.25988 partway through ...]\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25988], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
[Committed notebook output elided: several hundred near-identical `+` diff lines from the .ipynb cell. Each repetition prints "Step", the (DecisionSteps, TerminalSteps) tuple returned by env.get_steps(...) (whose object reprs were stripped to "(, )" during extraction), and a DecisionStep for agent_id=0 consisting of a 10x5 observation matrix of 0/1 rows, a 14-element vector observation whose final entry drifts between 85.95022 and 91.30755, reward=0.0, a three-branch action_mask of [array([False])] entries, group_id=0, group_reward=0.0, followed by a bare "0.0" reward printout.]
, 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.14938], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "0.0\n",
[… notebook output elided: the same "Step" / "(, )" / DecisionStep(...) / "0.0" printout repeats, near-verbatim, for roughly a hundred more environment steps. Throughout the run reward=0.0, agent_id=0, group_id=0 and group_reward=0.0 are constant and the action_mask stays [array([False]), array([False]), array([False])]; the 10×5 observation grid merely alternates its first two rows between [0., 1., 1., 0., 0.] and [0., 1., 0., 0., 0.], and in the 14-element state vector only three mid entries (values 0–2) and the final float (91.25333 → 91.30669 → 91.273026 → 91.27214) change between steps. …]
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+    "0.0\n",
+    "Step\n",
+    "(, )\n",
+    "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.      ,  0.      ,  0.      ,  3.      ,  3.      ,  3.      ,\n",
+    "        1.      ,  1.      ,  2.      ,  1.      ,  0.      ,  0.      ,\n",
+    "        0.      , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+    "0.0\n",
[... this Step / (, ) / DecisionStep / 0.0 cycle repeats for several hundred more output lines; between steps only a few entries of the 14-element vector observation change, the trailing value drifts from 91.30669 through 91.2745 to 91.26514, reward stays 0.0, and all three action-mask entries stay False ...]
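Output of this shape is what the low-level mlagents_envs Python API prints when the notebook steps the editor and dumps each agent's DecisionStep. A minimal sketch of such a loop, assuming an editor instance waiting to connect and a random-action policy (the step count and the policy are illustrative assumptions, not taken from the notebook):

from mlagents_envs.environment import UnityEnvironment

# Connect to a running Unity editor (file_name=None waits for the editor to press Play).
env = UnityEnvironment(file_name=None)
env.reset()

# Assumes a single behavior registered by the agents in the scene.
behavior_name = list(env.behavior_specs)[0]
spec = env.behavior_specs[behavior_name]

for _ in range(300):  # illustrative step count
    print("Step")
    decision_steps, terminal_steps = env.get_steps(behavior_name)
    # Prints as "(<...DecisionSteps object at 0x...>, <...TerminalSteps object at 0x...>)";
    # the object reprs were stripped down to "(, )" in the captured output above.
    print((decision_steps, terminal_steps))
    for agent_id in decision_steps:
        step = decision_steps[agent_id]  # the DecisionStep namedtuple dumped above
        print(step)
        print(step.reward)               # stays 0.0 while the agent earns no reward
    # Keep the episode advancing with random actions.
    env.set_actions(behavior_name, spec.action_spec.random_action(len(decision_steps)))
    env.step()

env.close()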
+    "0.0\n",
+    "Step\n",
+    "(, )\n",
+    "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.      ,  0.      ,  0.      ,  3.      ,  3.      ,  3.      ,\n",
+    "        1.      ,  0.      ,  2.      ,  1.      ,  0.      ,  0.      ,\n",
+    "        0.      
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n",
+ "(<mlagents_envs.base_env.DecisionSteps object at 0x…>, <mlagents_envs.base_env.TerminalSteps object at 0x…>)\n",
+ "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n",
+ "[… dozens of further Step/DecisionStep printouts elided; they repeat the block above with only minor changes in the observation vector …]\n"
+ ] + }, + { + "ename": "UnityCommunicatorStoppedException", + "evalue": "Communicator has exited.", + "output_type": "error", + "traceback": [
+ "---------------------------------------------------------------------------",
+ "UnityCommunicatorStoppedException Traceback (most recent call last)",
+ "Input In [85], in <cell line: 1>()\n 1 while True:\n----> 2 env.step()\n 3 print(\"Step\")\n 4 asd = env.get_steps('npc?team=0')\n",
+ "File ~/opt/miniforge3/lib/python3.9/site-packages/mlagents_envs/timers.py:305, in timed.<locals>.wrapped(*args, **kwargs)\n 303 def wrapped(*args, **kwargs):\n 304 with hierarchical_timer(func.__qualname__):\n--> 305 return func(*args, **kwargs)\n",
+ "File ~/opt/miniforge3/lib/python3.9/site-packages/mlagents_envs/environment.py:350, in UnityEnvironment.step(self)\n 348 outputs = self._communicator.exchange(step_input, self._poll_process)\n 349 if outputs is None:\n--> 350 raise UnityCommunicatorStoppedException(\"Communicator has exited.\")\n 351 self._update_behavior_specs(outputs)\n 352 rl_output = outputs.rl_output\n",
+ "UnityCommunicatorStoppedException: Communicator has exited."
+ ] + } + ], + "source": [ + "while True:\n", + " env.step()\n", + " print(\"Step\")\n", + " asd = env.get_steps('npc?team=0')\n", + " print(asd)\n", + " print(asd[0][0])\n", + " _id = asd[0][0].obs[0][0][0]\n", + " print(_id)\n", + " env.set_action_for_agent('npc?team=0', 0, mlagents_envs.environment.ActionTuple(discrete=np.array([[1, 0, random.randint(0,2)]])))" + ] + }, + { + "cell_type": "code", + "execution_count": 86, + "id": "db100c84-22ab-491b-b68d-4d5c1bbc66a3", + "metadata": {}, + "outputs": [], + "source": [ + "env.close()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}
From aa7b50554b7b572df14bc2a184b240e4a972b78e Mon Sep 17 00:00:00 2001 From: Andrey Gumirov Date: Sat, 7 May 2022 23:00:24 +0700 Subject: [PATCH 24/27] Added project version --- ProjectSettings/ProjectVersion.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 ProjectSettings/ProjectVersion.txt diff --git a/ProjectSettings/ProjectVersion.txt b/ProjectSettings/ProjectVersion.txt new file mode 100644 index 0000000..bf8f7eb --- /dev/null +++ b/ProjectSettings/ProjectVersion.txt @@ -0,0 +1,2 @@ +m_EditorVersion: 2019.4.36f1 +m_EditorVersionWithRevision: 2019.4.36f1 (660c164b2fc5)
From 6aa872c92f9f227429356704d8473cec23ec31c5 Mon Sep 17 00:00:00 2001 From: Krazerleo Date: Sat, 7 May 2022 23:08:32 +0700 Subject: [PATCH 25/27] Runtime bot behavior instantiation --- Assets/Scripts/Bots/CharacterFactory.cs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Assets/Scripts/Bots/CharacterFactory.cs b/Assets/Scripts/Bots/CharacterFactory.cs index 0ca7c1d..ba1f681 100644 --- a/Assets/Scripts/Bots/CharacterFactory.cs +++ b/Assets/Scripts/Bots/CharacterFactory.cs @@ -1,4 +1,6 @@ using System.Collections.Generic; +using Unity.Barracuda; +using Unity.MLAgents.Policies; using UnityEngine; public class CharacterFactory : MonoBehaviour @@ -70,7 +72,9 @@ public class CharacterFactory : MonoBehaviour } else { - gameobject.GetComponent<NPC>().GetCharacter.Team = team; + var npc = gameobject.GetComponent<NPC>(); + npc.GetCharacter.Team = team; + npc.SetModel(team.ToString(), ScriptableObject.CreateInstance<NNModel>(), InferenceDevice.Default ); gameobject.GetComponent<MovementController>().PointStartID = spawnPoint.PointId; bots.Add(gameobject); }
From 12d27d36ce7513259d88c72dc3c6625e4c9ac666 Mon Sep 17 00:00:00 2001 From: Krazerleo Date: Sat, 7 May 2022 23:17:08 +0700 Subject: [PATCH 26/27] _ --- ProjectSettings/ProjectVersion.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 ProjectSettings/ProjectVersion.txt diff --git a/ProjectSettings/ProjectVersion.txt b/ProjectSettings/ProjectVersion.txt new file mode 100644 index 0000000..89a11d7 --- /dev/null +++ b/ProjectSettings/ProjectVersion.txt @@ -0,0 +1,2 @@ +m_EditorVersion: 2019.4.35f1 +m_EditorVersionWithRevision: 2019.4.35f1 (0462406dff2e)
From d617ddabe7a25fab5e80e47ba1e4a29f1f341fa8 Mon Sep 17 00:00:00 2001 From: Krazerleo Date: Mon, 9 May 2022 21:31:21 +0700 Subject: [PATCH 27/27] Added Animations. Changed inspector view for navpoints. A lot of fixes. Changed project structure.
--- Assets/{Defaults.meta => Arts.meta} | 2 +- Assets/{ => Arts}/Materials.meta | 0 Assets/{ => Arts}/Materials/New Material.mat | 0 .../Materials/New Material.mat.meta | 0 Assets/{ => Arts}/Models.meta | 0 Assets/{ => Arts}/Models/Flag model.fbx | 0 Assets/{ => Arts}/Models/Flag model.fbx.meta | 0 Assets/{ => Arts}/Models/Textures.meta | 0 Assets/{ => Arts}/Models/Textures/Flag.jpg | 0 .../{ => Arts}/Models/Textures/Flag.jpg.meta | 0 .../{ => Arts}/SciFiWarriorPBRHPPolyart.meta | 0 .../SciFiWarriorPBRHPPolyart/Animations.meta | 0 .../Animations/AvatarMask.mask | 0 .../Animations/AvatarMask.mask.meta | 0 .../Animations/Die.fbx | 0 .../Animations/Die.fbx.meta | 619 +- .../Animations/Idle_Ducking_ar.fbx | 0 .../Animations/Idle_Ducking_ar.fbx.meta | 623 +- .../Animations/Idle_Shoot_ar.fbx | 0 .../Animations/Idle_Shoot_ar.fbx.meta | 0 .../Animations/Idle_guard_ar.fbx | 0 .../Animations/Idle_guard_ar.fbx.meta | 0 .../Animations/Idle_gunMiddle_ar.fbx | 0 .../Animations/Idle_gunMiddle_ar.fbx.meta | 619 +- .../Animations/Jump.fbx | 0 .../Animations/Jump.fbx.meta | 0 .../Animations/Reload.fbx | 0 .../Animations/Reload.fbx.meta | 0 .../Animations/Run_guard_AR.fbx | 0 .../Animations/Run_guard_AR.fbx.meta | 0 .../Animations/Run_gunMiddle_AR.fbx | 0 .../Animations/Run_gunMiddle_AR.fbx.meta | 0 .../Animations/Shoot_AutoShot_AR.fbx | 0 .../Animations/Shoot_AutoShot_AR.fbx.meta | 0 .../Animations/Shoot_BurstShot_AR.fbx | 0 .../Animations/Shoot_BurstShot_AR.fbx.meta | 0 .../Animations/Shoot_SingleShot_AR.fbx | 0 .../Animations/Shoot_SingleShot_AR.fbx.meta | 0 .../Animations/WalkBack_Shoot_ar.fbx | 0 .../Animations/WalkBack_Shoot_ar.fbx.meta | 0 .../Animations/WalkFront_Shoot_ar.fbx | 0 .../Animations/WalkFront_Shoot_ar.fbx.meta | 0 .../Animations/WalkLeft_Shoot_ar.fbx | 0 .../Animations/WalkLeft_Shoot_ar.fbx.meta | 0 .../Animations/WalkRight_Shoot_ar.fbx | 0 .../Animations/WalkRight_Shoot_ar.fbx.meta | 0 .../SciFiWarriorPBRHPPolyart/Animators.meta | 0 .../Animators/SciFiWarrior.controller | 93 + .../Animators/SciFiWarrior.controller.meta | 0 .../Animators/SciFiWarriorOur.controller | 0 .../Animators/SciFiWarriorOur.controller.meta | 0 .../SciFiWarriorPBRHPPolyart/Materials.meta | 0 .../SciFiWarriorPBRHPPolyart/Materials/HP.mat | 0 .../Materials/HP.mat.meta | 0 .../Materials/PBR.mat | 0 .../Materials/PBR.mat.meta | 0 .../Materials/Polyart.mat | 0 .../Materials/Polyart.mat.meta | 0 .../Materials/Skybox_Mat.mat | 178 +- .../Materials/Skybox_Mat.mat.meta | 0 .../Materials/Stage.mat | 0 .../Materials/Stage.mat.meta | 0 .../SciFiWarriorPBRHPPolyart/Mesh.meta | 0 .../Mesh/PBR_HP_Mesh.fbx | 0 .../Mesh/PBR_HP_Mesh.fbx.meta | 0 .../Mesh/Polyart_Mesh.fbx | 0 .../Mesh/Polyart_Mesh.fbx.meta | 0 .../SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx | 0 .../Mesh/Stage.fbx.meta | 0 .../SciFiWarriorPBRHPPolyart/Prefabs.meta | 0 .../Prefabs/HPCharacter.prefab | 0 .../Prefabs/HPCharacter.prefab.meta | 0 .../Prefabs/PBRCharacter.prefab | 0 .../Prefabs/PBRCharacter.prefab.meta | 0 .../Prefabs/PolyartCharacter.prefab | 102 + .../Prefabs/PolyartCharacter.prefab.meta | 0 .../SciFiWarriorPBRHPPolyart/Scene.meta | 0 .../Scene/DemoScene01.unity | 0 .../Scene/DemoScene01.unity.meta | 0 .../SciFiWarriorPBRHPPolyart/Textures.meta | 0 .../Textures/HP_Albedo.png | 0 .../Textures/HP_Albedo.png.meta | 0 .../Textures/PBR_Albedo.png | 0 .../Textures/PBR_Albedo.png.meta | 0 .../Textures/PBR_Free_AO.png | 0 .../Textures/PBR_Free_AO.png.meta | 0 .../Textures/PBR_Free_EM.png | 0 .../Textures/PBR_Free_EM.png.meta | 0 .../Textures/PBR_Free_MS.png | 0 
.../Textures/PBR_Free_MS.png.meta | 0 .../Textures/PBR_Free_NM.png | 0 .../Textures/PBR_Free_NM.png.meta | 0 .../Textures/Polyart.png | 0 .../Textures/Polyart.png.meta | 0 .../Textures/PolyartEmission.png | 0 .../Textures/PolyartEmission.png.meta | 0 .../SciFiWarriorPBRHPPolyart/promotion.jpg | 0 .../promotion.jpg.meta | 0 Assets/Defaults/mat_proBuilder.mat | 77 - Assets/Inputs/InputSystem.inputsettings.asset | 35 + .../InputSystem.inputsettings.asset.meta | 8 + Assets/Inputs/ThirdPersonViewInput.cs | 354 + Assets/Inputs/ThirdPersonViewInput.cs.meta | 11 + .../Inputs/ThirdPersonViewInput.inputactions | 166 + .../ThirdPersonViewInput.inputactions.meta | 14 + .../ML-Agents/Timers/DemoScene01_timers.json | 1 - .../Timers/dont touch me plz_timers.json | 1 - Assets/Prefabs/CharacterPrefabs.meta | 8 + .../Prefabs/{ => CharacterPrefabs}/Bot.prefab | 0 .../{ => CharacterPrefabs}/Bot.prefab.meta | 0 .../Prefabs/CharacterPrefabs/Character.prefab | 2639 ++ .../CharacterPrefabs/Character.prefab.meta} | 4 +- .../{ => CharacterPrefabs}/Player.prefab | 0 .../{ => CharacterPrefabs}/Player.prefab.meta | 0 Assets/Prefabs/EntityPrefabs.meta | 8 + .../{ => EntityPrefabs}/Flag zone.prefab | 0 .../{ => EntityPrefabs}/Flag zone.prefab.meta | 0 .../EntityPrefabs/navPointPrefab.prefab | 49 + .../EntityPrefabs/navPointPrefab.prefab.meta | 7 + Assets/Prefabs/PickupPrefabs.meta | 8 + .../Prefabs/{ => PickupPrefabs}/ammo.prefab | 0 .../{ => PickupPrefabs}/ammo.prefab.meta | 0 .../Prefabs/{ => PickupPrefabs}/armour.prefab | 0 .../{ => PickupPrefabs}/armour.prefab.meta | 0 .../Prefabs/{ => PickupPrefabs}/health.prefab | 0 .../{ => PickupPrefabs}/health.prefab.meta | 0 Assets/Resources.meta | 8 + Assets/Resources/BillingMode.json | 1 + .../BillingMode.json.meta} | 2 +- Assets/Scenes/AnimationTestScene.unity | 1060 + ...ity.meta => AnimationTestScene.unity.meta} | 2 +- .../Greatest_map_ever/Greatest_map_ever.unity | 166 +- .../Scenes/{First try.unity => Map#1.unity} | 0 ...{First try.unity.meta => Map#1.unity.meta} | 0 Assets/Scenes/dont touch me plz.unity | 460 - Assets/Scripts/Animators.meta | 3 + Assets/Scripts/Animators/Kirill Animator.meta | 3 + .../CustomCharacterController.cs} | 508 +- .../CustomCharacterController.cs.meta} | 0 .../Animators/Kirill Animator/Models.cs | 67 + .../Kirill Animator/Models.cs.meta} | 0 Assets/Scripts/Animators/Leonid Animator.meta | 3 + .../Leonid Animator/AnimatorHandler.cs | 94 + .../Leonid Animator/AnimatorHandler.cs.meta | 3 + .../CharacterAnimator.controller} | 1289 +- .../CharacterAnimator.controller.meta} | 4 +- .../Leonid Animator/CharacterLocomotion.cs | 105 + .../CharacterLocomotion.cs.meta | 3 + .../Animators/Leonid Animator/InputHandler.cs | 77 + .../Leonid Animator/InputHandler.cs.meta | 3 + .../Animators/Leonid Animator/LowerBody.mask | 139 + .../Leonid Animator/LowerBody.mask.meta | 8 + .../Animators/Leonid Animator/UpperBody.mask | 139 + .../Leonid Animator/UpperBody.mask.meta | 8 + Assets/Scripts/CameraScripts.meta | 3 + Assets/Scripts/CameraScripts/CameraHandler.cs | 61 + .../CameraScripts/CameraHandler.cs.meta | 3 + Assets/Scripts/Character/NPC.cs | 1 + Assets/Scripts/Character/scr_Models.cs | 75 - Assets/Scripts/Misc/NavPoint.cs | 9 +- ...eaponController.cs => WeaponController.cs} | 93 +- ...oller.cs.meta => WeaponController.cs.meta} | 0 ProjectSettings/EditorBuildSettings.asset | 4 +- ProjectSettings/ProjectVersion.txt | 2 + ProjectSettings/TagManager.asset | 2 +- test-ml-agents.ipynb | 20623 ++++++++++++++++ 166 files changed, 28460 insertions(+), 2197 deletions(-) 
rename Assets/{Defaults.meta => Arts.meta} (77%) mode change 100755 => 100644 rename Assets/{ => Arts}/Materials.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Materials/New Material.mat (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Materials/New Material.mat.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Models.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Models/Flag model.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Models/Flag model.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Models/Textures.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Models/Textures/Flag.jpg (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/Models/Textures/Flag.jpg.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Die.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta (79%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta (79%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta (79%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx.meta (100%) 
mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animators.meta (100%) mode change 100755 => 100644 create mode 100644 Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/HP.mat (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/HP.mat.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/PBR.mat (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/PBR.mat.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat (96%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/Stage.mat (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Materials/Stage.mat.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx.meta (100%) mode change 
100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab (96%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Scene.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/Polyart.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/Polyart.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png.meta (100%) mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/promotion.jpg (100%) 
mode change 100755 => 100644 rename Assets/{ => Arts}/SciFiWarriorPBRHPPolyart/promotion.jpg.meta (100%) mode change 100755 => 100644 delete mode 100755 Assets/Defaults/mat_proBuilder.mat create mode 100644 Assets/Inputs/InputSystem.inputsettings.asset create mode 100644 Assets/Inputs/InputSystem.inputsettings.asset.meta create mode 100644 Assets/Inputs/ThirdPersonViewInput.cs create mode 100644 Assets/Inputs/ThirdPersonViewInput.cs.meta create mode 100644 Assets/Inputs/ThirdPersonViewInput.inputactions create mode 100644 Assets/Inputs/ThirdPersonViewInput.inputactions.meta delete mode 100755 Assets/ML-Agents/Timers/DemoScene01_timers.json delete mode 100755 Assets/ML-Agents/Timers/dont touch me plz_timers.json create mode 100644 Assets/Prefabs/CharacterPrefabs.meta rename Assets/Prefabs/{ => CharacterPrefabs}/Bot.prefab (100%) rename Assets/Prefabs/{ => CharacterPrefabs}/Bot.prefab.meta (100%) mode change 100755 => 100644 create mode 100644 Assets/Prefabs/CharacterPrefabs/Character.prefab rename Assets/{ML-Agents/Timers/dont touch me plz_timers.json.meta => Prefabs/CharacterPrefabs/Character.prefab.meta} (62%) mode change 100755 => 100644 rename Assets/Prefabs/{ => CharacterPrefabs}/Player.prefab (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => CharacterPrefabs}/Player.prefab.meta (100%) create mode 100644 Assets/Prefabs/EntityPrefabs.meta rename Assets/Prefabs/{ => EntityPrefabs}/Flag zone.prefab (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => EntityPrefabs}/Flag zone.prefab.meta (100%) mode change 100755 => 100644 create mode 100644 Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab create mode 100644 Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab.meta create mode 100644 Assets/Prefabs/PickupPrefabs.meta rename Assets/Prefabs/{ => PickupPrefabs}/ammo.prefab (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => PickupPrefabs}/ammo.prefab.meta (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => PickupPrefabs}/armour.prefab (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => PickupPrefabs}/armour.prefab.meta (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => PickupPrefabs}/health.prefab (100%) mode change 100755 => 100644 rename Assets/Prefabs/{ => PickupPrefabs}/health.prefab.meta (100%) mode change 100755 => 100644 create mode 100644 Assets/Resources.meta create mode 100644 Assets/Resources/BillingMode.json rename Assets/{ML-Agents/Timers/DemoScene01_timers.json.meta => Resources/BillingMode.json.meta} (75%) mode change 100755 => 100644 create mode 100644 Assets/Scenes/AnimationTestScene.unity rename Assets/Scenes/{dont touch me plz.unity.meta => AnimationTestScene.unity.meta} (74%) mode change 100755 => 100644 rename Assets/Scenes/{First try.unity => Map#1.unity} (100%) mode change 100755 => 100644 rename Assets/Scenes/{First try.unity.meta => Map#1.unity.meta} (100%) mode change 100755 => 100644 delete mode 100755 Assets/Scenes/dont touch me plz.unity create mode 100644 Assets/Scripts/Animators.meta create mode 100644 Assets/Scripts/Animators/Kirill Animator.meta rename Assets/Scripts/{Character/scr_CharacterController.cs => Animators/Kirill Animator/CustomCharacterController.cs} (95%) mode change 100755 => 100644 rename Assets/Scripts/{Character/scr_CharacterController.cs.meta => Animators/Kirill Animator/CustomCharacterController.cs.meta} (100%) mode change 100755 => 100644 create mode 100644 Assets/Scripts/Animators/Kirill Animator/Models.cs rename Assets/Scripts/{Character/scr_Models.cs.meta => 
Animators/Kirill Animator/Models.cs.meta} (100%) mode change 100755 => 100644 create mode 100644 Assets/Scripts/Animators/Leonid Animator.meta create mode 100644 Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs create mode 100644 Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs.meta rename Assets/{SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller => Scripts/Animators/Leonid Animator/CharacterAnimator.controller} (56%) mode change 100755 => 100644 rename Assets/{Defaults/mat_proBuilder.mat.meta => Scripts/Animators/Leonid Animator/CharacterAnimator.controller.meta} (64%) mode change 100755 => 100644 create mode 100644 Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs create mode 100644 Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs.meta create mode 100644 Assets/Scripts/Animators/Leonid Animator/InputHandler.cs create mode 100644 Assets/Scripts/Animators/Leonid Animator/InputHandler.cs.meta create mode 100644 Assets/Scripts/Animators/Leonid Animator/LowerBody.mask create mode 100644 Assets/Scripts/Animators/Leonid Animator/LowerBody.mask.meta create mode 100644 Assets/Scripts/Animators/Leonid Animator/UpperBody.mask create mode 100644 Assets/Scripts/Animators/Leonid Animator/UpperBody.mask.meta create mode 100644 Assets/Scripts/CameraScripts.meta create mode 100644 Assets/Scripts/CameraScripts/CameraHandler.cs create mode 100644 Assets/Scripts/CameraScripts/CameraHandler.cs.meta delete mode 100755 Assets/Scripts/Character/scr_Models.cs rename Assets/Scripts/Weapons/{scr_WeaponController.cs => WeaponController.cs} (96%) mode change 100755 => 100644 rename Assets/Scripts/Weapons/{scr_WeaponController.cs.meta => WeaponController.cs.meta} (100%) mode change 100755 => 100644 create mode 100644 ProjectSettings/ProjectVersion.txt create mode 100644 test-ml-agents.ipynb diff --git a/Assets/Defaults.meta b/Assets/Arts.meta old mode 100755 new mode 100644 similarity index 77% rename from Assets/Defaults.meta rename to Assets/Arts.meta index f41c53d..a5b76ea --- a/Assets/Defaults.meta +++ b/Assets/Arts.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: b183268306ddaac4b875d5d435faa50b +guid: 0d3fef053a0198e44a77e5bbb1c9e6ba folderAsset: yes DefaultImporter: externalObjects: {} diff --git a/Assets/Materials.meta b/Assets/Arts/Materials.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Materials.meta rename to Assets/Arts/Materials.meta diff --git a/Assets/Materials/New Material.mat b/Assets/Arts/Materials/New Material.mat old mode 100755 new mode 100644 similarity index 100% rename from Assets/Materials/New Material.mat rename to Assets/Arts/Materials/New Material.mat diff --git a/Assets/Materials/New Material.mat.meta b/Assets/Arts/Materials/New Material.mat.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Materials/New Material.mat.meta rename to Assets/Arts/Materials/New Material.mat.meta diff --git a/Assets/Models.meta b/Assets/Arts/Models.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Models.meta rename to Assets/Arts/Models.meta diff --git a/Assets/Models/Flag model.fbx b/Assets/Arts/Models/Flag model.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/Models/Flag model.fbx rename to Assets/Arts/Models/Flag model.fbx diff --git a/Assets/Models/Flag model.fbx.meta b/Assets/Arts/Models/Flag model.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Models/Flag model.fbx.meta rename to 
Assets/Arts/Models/Flag model.fbx.meta diff --git a/Assets/Models/Textures.meta b/Assets/Arts/Models/Textures.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Models/Textures.meta rename to Assets/Arts/Models/Textures.meta diff --git a/Assets/Models/Textures/Flag.jpg b/Assets/Arts/Models/Textures/Flag.jpg old mode 100755 new mode 100644 similarity index 100% rename from Assets/Models/Textures/Flag.jpg rename to Assets/Arts/Models/Textures/Flag.jpg diff --git a/Assets/Models/Textures/Flag.jpg.meta b/Assets/Arts/Models/Textures/Flag.jpg.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Models/Textures/Flag.jpg.meta rename to Assets/Arts/Models/Textures/Flag.jpg.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/AvatarMask.mask.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Die.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Die.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Die.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Die.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta old mode 100755 new mode 100644 similarity index 79% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta index 7d5f536..5e3e975 --- a/Assets/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta +++ b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Die.fbx.meta @@ -1,160 +1,458 @@ fileFormatVersion: 2 guid: d406f8f3cbe268f4e9d0234d45cca60c ModelImporter: - serializedVersion: 23 - fileIDToRecycleName: - 100000: Arm1 - 100002: ArmPlacement_Left - 100004: ArmPlacement_Right - 100006: ArmPlacement_Upper - 100008: ArmPosition_Left - 100010: ArmPosition_Right - 100012: AssaultRifle - 100014: BackPack - 100016: Backpack1 - 100018: Body1 - 100020: Chest - 100022: //RootNode - 100024: Foot_Left - 100026: Foot_Right - 100028: Hand_Left - 100030: Hand_Right - 100032: Head - 100034: head1 - 100036: Headgear_Left - 100038: Headgear_Right - 100040: Hips - 100042: Index_Distal_Left - 100044: Index_Distal_Right - 100046: Index_Intermediate_Left - 100048: Index_Intermediate_Right - 100050: Index_Proximal_Left - 100052: Index_Proximal_Right - 100054: Leg1 - 100056: 
LowerArm_Left - 100058: LowerArm_Right - 100060: LowerLeg_Left - 100062: LowerLeg_Right - 100064: magazine_Right - 100066: Neck - 100068: RestOfFingers_Distal_Left - 100070: RestOfFingers_Distal_Right - 100072: RestOfFingers_Intermediate_Left - 100074: RestOfFingers_Intermediate_Right - 100076: RestOfFingers_Proximal_Left - 100078: RestOfFingers_Proximal_Right - 100080: Shoulder_Left - 100082: Shoulder_Right - 100084: ShoulderPadBlade_Left - 100086: ShoulderPadBlade_Right - 100088: ShoulderPadBody_Left - 100090: ShoulderPadBody_Right - 100092: ShoulderPadCTRL_Left - 100094: ShoulderPadCTRL_Right - 100096: Spine - 100098: Thumb_Distal_Left - 100100: Thumb_Distal_Right - 100102: Thumb_Intermediate_Left - 100104: Thumb_Intermediate_Right - 100106: Thumb_Proximal_Left - 100108: Thumb_Proximal_Right - 100110: Toe_Left - 100112: Toe_Right - 100114: Toetip_Left - 100116: Toetip_Right - 100118: Trigger_Right - 100120: UpperArm_Left - 100122: UpperArm_Right - 100124: UpperLeg_Left - 100126: UpperLeg_Right - 400000: Arm1 - 400002: ArmPlacement_Left - 400004: ArmPlacement_Right - 400006: ArmPlacement_Upper - 400008: ArmPosition_Left - 400010: ArmPosition_Right - 400012: AssaultRifle - 400014: BackPack - 400016: Backpack1 - 400018: Body1 - 400020: Chest - 400022: //RootNode - 400024: Foot_Left - 400026: Foot_Right - 400028: Hand_Left - 400030: Hand_Right - 400032: Head - 400034: head1 - 400036: Headgear_Left - 400038: Headgear_Right - 400040: Hips - 400042: Index_Distal_Left - 400044: Index_Distal_Right - 400046: Index_Intermediate_Left - 400048: Index_Intermediate_Right - 400050: Index_Proximal_Left - 400052: Index_Proximal_Right - 400054: Leg1 - 400056: LowerArm_Left - 400058: LowerArm_Right - 400060: LowerLeg_Left - 400062: LowerLeg_Right - 400064: magazine_Right - 400066: Neck - 400068: RestOfFingers_Distal_Left - 400070: RestOfFingers_Distal_Right - 400072: RestOfFingers_Intermediate_Left - 400074: RestOfFingers_Intermediate_Right - 400076: RestOfFingers_Proximal_Left - 400078: RestOfFingers_Proximal_Right - 400080: Shoulder_Left - 400082: Shoulder_Right - 400084: ShoulderPadBlade_Left - 400086: ShoulderPadBlade_Right - 400088: ShoulderPadBody_Left - 400090: ShoulderPadBody_Right - 400092: ShoulderPadCTRL_Left - 400094: ShoulderPadCTRL_Right - 400096: Spine - 400098: Thumb_Distal_Left - 400100: Thumb_Distal_Right - 400102: Thumb_Intermediate_Left - 400104: Thumb_Intermediate_Right - 400106: Thumb_Proximal_Left - 400108: Thumb_Proximal_Right - 400110: Toe_Left - 400112: Toe_Right - 400114: Toetip_Left - 400116: Toetip_Right - 400118: Trigger_Right - 400120: UpperArm_Left - 400122: UpperArm_Right - 400124: UpperLeg_Left - 400126: UpperLeg_Right - 2100000: w_h_01 - 2100002: w_b_01 - 2100004: w_bp_01 - 2100006: w_a_01 - 2100008: w_f_01 - 2100010: w_w_01 - 4300000: head1 - 4300002: Body1 - 4300004: Backpack1 - 4300006: Arm1 - 4300008: Leg1 - 4300010: AssaultRifle - 7400000: Die - 9500000: //RootNode - 13700000: Arm1 - 13700002: AssaultRifle - 13700004: Backpack1 - 13700006: Body1 - 13700008: head1 - 13700010: Leg1 - 2186277476908879412: ImportLogs + serializedVersion: 19301 + internalIDToNameTable: + - first: + 1: 100000 + second: Arm1 + - first: + 1: 100002 + second: ArmPlacement_Left + - first: + 1: 100004 + second: ArmPlacement_Right + - first: + 1: 100006 + second: ArmPlacement_Upper + - first: + 1: 100008 + second: ArmPosition_Left + - first: + 1: 100010 + second: ArmPosition_Right + - first: + 1: 100012 + second: AssaultRifle + - first: + 1: 100014 + second: BackPack + - first: + 1: 100016 + 
second: Backpack1 + - first: + 1: 100018 + second: Body1 + - first: + 1: 100020 + second: Chest + - first: + 1: 100022 + second: //RootNode + - first: + 1: 100024 + second: Foot_Left + - first: + 1: 100026 + second: Foot_Right + - first: + 1: 100028 + second: Hand_Left + - first: + 1: 100030 + second: Hand_Right + - first: + 1: 100032 + second: Head + - first: + 1: 100034 + second: head1 + - first: + 1: 100036 + second: Headgear_Left + - first: + 1: 100038 + second: Headgear_Right + - first: + 1: 100040 + second: Hips + - first: + 1: 100042 + second: Index_Distal_Left + - first: + 1: 100044 + second: Index_Distal_Right + - first: + 1: 100046 + second: Index_Intermediate_Left + - first: + 1: 100048 + second: Index_Intermediate_Right + - first: + 1: 100050 + second: Index_Proximal_Left + - first: + 1: 100052 + second: Index_Proximal_Right + - first: + 1: 100054 + second: Leg1 + - first: + 1: 100056 + second: LowerArm_Left + - first: + 1: 100058 + second: LowerArm_Right + - first: + 1: 100060 + second: LowerLeg_Left + - first: + 1: 100062 + second: LowerLeg_Right + - first: + 1: 100064 + second: magazine_Right + - first: + 1: 100066 + second: Neck + - first: + 1: 100068 + second: RestOfFingers_Distal_Left + - first: + 1: 100070 + second: RestOfFingers_Distal_Right + - first: + 1: 100072 + second: RestOfFingers_Intermediate_Left + - first: + 1: 100074 + second: RestOfFingers_Intermediate_Right + - first: + 1: 100076 + second: RestOfFingers_Proximal_Left + - first: + 1: 100078 + second: RestOfFingers_Proximal_Right + - first: + 1: 100080 + second: Shoulder_Left + - first: + 1: 100082 + second: Shoulder_Right + - first: + 1: 100084 + second: ShoulderPadBlade_Left + - first: + 1: 100086 + second: ShoulderPadBlade_Right + - first: + 1: 100088 + second: ShoulderPadBody_Left + - first: + 1: 100090 + second: ShoulderPadBody_Right + - first: + 1: 100092 + second: ShoulderPadCTRL_Left + - first: + 1: 100094 + second: ShoulderPadCTRL_Right + - first: + 1: 100096 + second: Spine + - first: + 1: 100098 + second: Thumb_Distal_Left + - first: + 1: 100100 + second: Thumb_Distal_Right + - first: + 1: 100102 + second: Thumb_Intermediate_Left + - first: + 1: 100104 + second: Thumb_Intermediate_Right + - first: + 1: 100106 + second: Thumb_Proximal_Left + - first: + 1: 100108 + second: Thumb_Proximal_Right + - first: + 1: 100110 + second: Toe_Left + - first: + 1: 100112 + second: Toe_Right + - first: + 1: 100114 + second: Toetip_Left + - first: + 1: 100116 + second: Toetip_Right + - first: + 1: 100118 + second: Trigger_Right + - first: + 1: 100120 + second: UpperArm_Left + - first: + 1: 100122 + second: UpperArm_Right + - first: + 1: 100124 + second: UpperLeg_Left + - first: + 1: 100126 + second: UpperLeg_Right + - first: + 4: 400000 + second: Arm1 + - first: + 4: 400002 + second: ArmPlacement_Left + - first: + 4: 400004 + second: ArmPlacement_Right + - first: + 4: 400006 + second: ArmPlacement_Upper + - first: + 4: 400008 + second: ArmPosition_Left + - first: + 4: 400010 + second: ArmPosition_Right + - first: + 4: 400012 + second: AssaultRifle + - first: + 4: 400014 + second: BackPack + - first: + 4: 400016 + second: Backpack1 + - first: + 4: 400018 + second: Body1 + - first: + 4: 400020 + second: Chest + - first: + 4: 400022 + second: //RootNode + - first: + 4: 400024 + second: Foot_Left + - first: + 4: 400026 + second: Foot_Right + - first: + 4: 400028 + second: Hand_Left + - first: + 4: 400030 + second: Hand_Right + - first: + 4: 400032 + second: Head + - first: + 4: 400034 + second: head1 + - first: + 4: 
400036 + second: Headgear_Left + - first: + 4: 400038 + second: Headgear_Right + - first: + 4: 400040 + second: Hips + - first: + 4: 400042 + second: Index_Distal_Left + - first: + 4: 400044 + second: Index_Distal_Right + - first: + 4: 400046 + second: Index_Intermediate_Left + - first: + 4: 400048 + second: Index_Intermediate_Right + - first: + 4: 400050 + second: Index_Proximal_Left + - first: + 4: 400052 + second: Index_Proximal_Right + - first: + 4: 400054 + second: Leg1 + - first: + 4: 400056 + second: LowerArm_Left + - first: + 4: 400058 + second: LowerArm_Right + - first: + 4: 400060 + second: LowerLeg_Left + - first: + 4: 400062 + second: LowerLeg_Right + - first: + 4: 400064 + second: magazine_Right + - first: + 4: 400066 + second: Neck + - first: + 4: 400068 + second: RestOfFingers_Distal_Left + - first: + 4: 400070 + second: RestOfFingers_Distal_Right + - first: + 4: 400072 + second: RestOfFingers_Intermediate_Left + - first: + 4: 400074 + second: RestOfFingers_Intermediate_Right + - first: + 4: 400076 + second: RestOfFingers_Proximal_Left + - first: + 4: 400078 + second: RestOfFingers_Proximal_Right + - first: + 4: 400080 + second: Shoulder_Left + - first: + 4: 400082 + second: Shoulder_Right + - first: + 4: 400084 + second: ShoulderPadBlade_Left + - first: + 4: 400086 + second: ShoulderPadBlade_Right + - first: + 4: 400088 + second: ShoulderPadBody_Left + - first: + 4: 400090 + second: ShoulderPadBody_Right + - first: + 4: 400092 + second: ShoulderPadCTRL_Left + - first: + 4: 400094 + second: ShoulderPadCTRL_Right + - first: + 4: 400096 + second: Spine + - first: + 4: 400098 + second: Thumb_Distal_Left + - first: + 4: 400100 + second: Thumb_Distal_Right + - first: + 4: 400102 + second: Thumb_Intermediate_Left + - first: + 4: 400104 + second: Thumb_Intermediate_Right + - first: + 4: 400106 + second: Thumb_Proximal_Left + - first: + 4: 400108 + second: Thumb_Proximal_Right + - first: + 4: 400110 + second: Toe_Left + - first: + 4: 400112 + second: Toe_Right + - first: + 4: 400114 + second: Toetip_Left + - first: + 4: 400116 + second: Toetip_Right + - first: + 4: 400118 + second: Trigger_Right + - first: + 4: 400120 + second: UpperArm_Left + - first: + 4: 400122 + second: UpperArm_Right + - first: + 4: 400124 + second: UpperLeg_Left + - first: + 4: 400126 + second: UpperLeg_Right + - first: + 21: 2100000 + second: w_h_01 + - first: + 21: 2100002 + second: w_b_01 + - first: + 21: 2100004 + second: w_bp_01 + - first: + 21: 2100006 + second: w_a_01 + - first: + 21: 2100008 + second: w_f_01 + - first: + 21: 2100010 + second: w_w_01 + - first: + 43: 4300000 + second: head1 + - first: + 43: 4300002 + second: Body1 + - first: + 43: 4300004 + second: Backpack1 + - first: + 43: 4300006 + second: Arm1 + - first: + 43: 4300008 + second: Leg1 + - first: + 43: 4300010 + second: AssaultRifle + - first: + 74: 7400000 + second: Die + - first: + 95: 9500000 + second: //RootNode + - first: + 137: 13700000 + second: Arm1 + - first: + 137: 13700002 + second: AssaultRifle + - first: + 137: 13700004 + second: Backpack1 + - first: + 137: 13700006 + second: Body1 + - first: + 137: 13700008 + second: head1 + - first: + 137: 13700010 + second: Leg1 + - first: + 41386430: 2186277476908879412 + second: ImportLogs externalObjects: {} materials: - importMaterials: 1 + materialImportMode: 1 materialName: 0 materialSearch: 1 materialLocation: 1 @@ -183,6 +481,7 @@ ModelImporter: - serializedVersion: 16 name: Die takeName: Take 001 + internalID: 0 firstFrame: 0 lastFrame: 32 wrapMode: 0 @@ -343,25 +642,28 @@ 
ModelImporter: meshCompression: 0 addColliders: 0 useSRGBMaterialColor: 1 + sortHierarchyByName: 1 importVisibility: 1 importBlendShapes: 1 importCameras: 1 importLights: 1 + fileIdsGeneration: 1 swapUVChannels: 0 generateSecondaryUV: 0 useFileUnits: 1 - optimizeMeshForGPU: 1 keepQuads: 0 weldVertices: 1 preserveHierarchy: 0 + skinWeightsMode: 0 + maxBonesPerVertex: 4 + minBoneWeight: 0.001 + meshOptimizationFlags: -1 indexFormat: 0 secondaryUVAngleDistortion: 8 secondaryUVAreaDistortion: 15.000001 secondaryUVHardAngle: 88 secondaryUVPackMargin: 4 useFileScale: 1 - previousCalculatedGlobalScale: 1 - hasPreviousCalculatedGlobalScale: 1 tangentSpace: normalSmoothAngle: 60 normalImportMode: 0 @@ -370,10 +672,10 @@ ModelImporter: legacyComputeAllNormalsFromSmoothingGroupsWhenMeshHasBlendShapes: 0 blendShapeNormalImportMode: 1 normalSmoothingSource: 0 + referencedClips: [] importAnimation: 1 - copyAvatar: 0 humanDescription: - serializedVersion: 2 + serializedVersion: 3 human: - boneName: Hips humanName: Hips @@ -1015,13 +1317,16 @@ ModelImporter: armStretch: 0.05 legStretch: 0.05 feetSpacing: 0 + globalScale: 1 rootMotionBoneName: hasTranslationDoF: 0 hasExtraRoot: 1 skeletonHasParents: 1 lastHumanDescriptionAvatarSource: {instanceID: 0} + autoGenerateAvatarMappingIfUnspecified: 1 animationType: 3 humanoidOversampling: 1 + avatarSetup: 1 additionalBone: 0 userData: assetBundleName: diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta old mode 100755 new mode 100644 similarity index 79% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta index f587e0e..b9863d4 --- a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta +++ b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Ducking_ar.fbx.meta @@ -1,160 +1,458 @@ fileFormatVersion: 2 guid: 3ad7c5979f6586d4a9532a55492a0ebe ModelImporter: - serializedVersion: 23 - fileIDToRecycleName: - 100000: Arm1 - 100002: ArmPlacement_Left - 100004: ArmPlacement_Right - 100006: ArmPlacement_Upper - 100008: ArmPosition_Left - 100010: ArmPosition_Right - 100012: AssaultRifle - 100014: BackPack - 100016: Backpack1 - 100018: Body1 - 100020: Chest - 100022: Foot_Left - 100024: Foot_Right - 100026: Hand_Left - 100028: Hand_Right - 100030: Head - 100032: head1 - 100034: Headgear_Left - 100036: Headgear_Right - 100038: Hips - 100040: //RootNode - 100042: Index_Distal_Left - 100044: Index_Distal_Right - 100046: Index_Intermediate_Left - 100048: Index_Intermediate_Right - 100050: Index_Proximal_Left - 100052: Index_Proximal_Right - 100054: Leg1 - 100056: LowerArm_Left - 100058: LowerArm_Right - 100060: LowerLeg_Left - 100062: LowerLeg_Right - 100064: magazine_Right - 100066: Neck - 100068: RestOfFingers_Distal_Left - 100070: RestOfFingers_Distal_Right - 100072: RestOfFingers_Intermediate_Left - 100074: RestOfFingers_Intermediate_Right - 100076: RestOfFingers_Proximal_Left - 100078: RestOfFingers_Proximal_Right - 100080: Shoulder_Left - 100082: Shoulder_Right - 100084: ShoulderPadBlade_Left - 
100086: ShoulderPadBlade_Right - 100088: ShoulderPadBody_Left - 100090: ShoulderPadBody_Right - 100092: ShoulderPadCTRL_Left - 100094: ShoulderPadCTRL_Right - 100096: Spine - 100098: Thumb_Distal_Left - 100100: Thumb_Distal_Right - 100102: Thumb_Intermediate_Left - 100104: Thumb_Intermediate_Right - 100106: Thumb_Proximal_Left - 100108: Thumb_Proximal_Right - 100110: Toe_Left - 100112: Toe_Right - 100114: Toetip_Left - 100116: Toetip_Right - 100118: Trigger_Right - 100120: UpperArm_Left - 100122: UpperArm_Right - 100124: UpperLeg_Left - 100126: UpperLeg_Right - 400000: Arm1 - 400002: ArmPlacement_Left - 400004: ArmPlacement_Right - 400006: ArmPlacement_Upper - 400008: ArmPosition_Left - 400010: ArmPosition_Right - 400012: AssaultRifle - 400014: BackPack - 400016: Backpack1 - 400018: Body1 - 400020: Chest - 400022: Foot_Left - 400024: Foot_Right - 400026: Hand_Left - 400028: Hand_Right - 400030: Head - 400032: head1 - 400034: Headgear_Left - 400036: Headgear_Right - 400038: Hips - 400040: //RootNode - 400042: Index_Distal_Left - 400044: Index_Distal_Right - 400046: Index_Intermediate_Left - 400048: Index_Intermediate_Right - 400050: Index_Proximal_Left - 400052: Index_Proximal_Right - 400054: Leg1 - 400056: LowerArm_Left - 400058: LowerArm_Right - 400060: LowerLeg_Left - 400062: LowerLeg_Right - 400064: magazine_Right - 400066: Neck - 400068: RestOfFingers_Distal_Left - 400070: RestOfFingers_Distal_Right - 400072: RestOfFingers_Intermediate_Left - 400074: RestOfFingers_Intermediate_Right - 400076: RestOfFingers_Proximal_Left - 400078: RestOfFingers_Proximal_Right - 400080: Shoulder_Left - 400082: Shoulder_Right - 400084: ShoulderPadBlade_Left - 400086: ShoulderPadBlade_Right - 400088: ShoulderPadBody_Left - 400090: ShoulderPadBody_Right - 400092: ShoulderPadCTRL_Left - 400094: ShoulderPadCTRL_Right - 400096: Spine - 400098: Thumb_Distal_Left - 400100: Thumb_Distal_Right - 400102: Thumb_Intermediate_Left - 400104: Thumb_Intermediate_Right - 400106: Thumb_Proximal_Left - 400108: Thumb_Proximal_Right - 400110: Toe_Left - 400112: Toe_Right - 400114: Toetip_Left - 400116: Toetip_Right - 400118: Trigger_Right - 400120: UpperArm_Left - 400122: UpperArm_Right - 400124: UpperLeg_Left - 400126: UpperLeg_Right - 2100000: w_h_01 - 2100002: w_b_01 - 2100004: w_bp_01 - 2100006: w_a_01 - 2100008: w_f_01 - 2100010: w_w_01 - 4300000: head1 - 4300002: Body1 - 4300004: Backpack1 - 4300006: Arm1 - 4300008: Leg1 - 4300010: AssaultRifle - 7400000: Idle_Ducking_AR - 9500000: //RootNode - 13700000: Arm1 - 13700002: AssaultRifle - 13700004: Backpack1 - 13700006: Body1 - 13700008: head1 - 13700010: Leg1 - 2186277476908879412: ImportLogs + serializedVersion: 19301 + internalIDToNameTable: + - first: + 1: 100000 + second: Arm1 + - first: + 1: 100002 + second: ArmPlacement_Left + - first: + 1: 100004 + second: ArmPlacement_Right + - first: + 1: 100006 + second: ArmPlacement_Upper + - first: + 1: 100008 + second: ArmPosition_Left + - first: + 1: 100010 + second: ArmPosition_Right + - first: + 1: 100012 + second: AssaultRifle + - first: + 1: 100014 + second: BackPack + - first: + 1: 100016 + second: Backpack1 + - first: + 1: 100018 + second: Body1 + - first: + 1: 100020 + second: Chest + - first: + 1: 100022 + second: Foot_Left + - first: + 1: 100024 + second: Foot_Right + - first: + 1: 100026 + second: Hand_Left + - first: + 1: 100028 + second: Hand_Right + - first: + 1: 100030 + second: Head + - first: + 1: 100032 + second: head1 + - first: + 1: 100034 + second: Headgear_Left + - first: + 1: 100036 + second: 
Headgear_Right + - first: + 1: 100038 + second: Hips + - first: + 1: 100040 + second: //RootNode + - first: + 1: 100042 + second: Index_Distal_Left + - first: + 1: 100044 + second: Index_Distal_Right + - first: + 1: 100046 + second: Index_Intermediate_Left + - first: + 1: 100048 + second: Index_Intermediate_Right + - first: + 1: 100050 + second: Index_Proximal_Left + - first: + 1: 100052 + second: Index_Proximal_Right + - first: + 1: 100054 + second: Leg1 + - first: + 1: 100056 + second: LowerArm_Left + - first: + 1: 100058 + second: LowerArm_Right + - first: + 1: 100060 + second: LowerLeg_Left + - first: + 1: 100062 + second: LowerLeg_Right + - first: + 1: 100064 + second: magazine_Right + - first: + 1: 100066 + second: Neck + - first: + 1: 100068 + second: RestOfFingers_Distal_Left + - first: + 1: 100070 + second: RestOfFingers_Distal_Right + - first: + 1: 100072 + second: RestOfFingers_Intermediate_Left + - first: + 1: 100074 + second: RestOfFingers_Intermediate_Right + - first: + 1: 100076 + second: RestOfFingers_Proximal_Left + - first: + 1: 100078 + second: RestOfFingers_Proximal_Right + - first: + 1: 100080 + second: Shoulder_Left + - first: + 1: 100082 + second: Shoulder_Right + - first: + 1: 100084 + second: ShoulderPadBlade_Left + - first: + 1: 100086 + second: ShoulderPadBlade_Right + - first: + 1: 100088 + second: ShoulderPadBody_Left + - first: + 1: 100090 + second: ShoulderPadBody_Right + - first: + 1: 100092 + second: ShoulderPadCTRL_Left + - first: + 1: 100094 + second: ShoulderPadCTRL_Right + - first: + 1: 100096 + second: Spine + - first: + 1: 100098 + second: Thumb_Distal_Left + - first: + 1: 100100 + second: Thumb_Distal_Right + - first: + 1: 100102 + second: Thumb_Intermediate_Left + - first: + 1: 100104 + second: Thumb_Intermediate_Right + - first: + 1: 100106 + second: Thumb_Proximal_Left + - first: + 1: 100108 + second: Thumb_Proximal_Right + - first: + 1: 100110 + second: Toe_Left + - first: + 1: 100112 + second: Toe_Right + - first: + 1: 100114 + second: Toetip_Left + - first: + 1: 100116 + second: Toetip_Right + - first: + 1: 100118 + second: Trigger_Right + - first: + 1: 100120 + second: UpperArm_Left + - first: + 1: 100122 + second: UpperArm_Right + - first: + 1: 100124 + second: UpperLeg_Left + - first: + 1: 100126 + second: UpperLeg_Right + - first: + 4: 400000 + second: Arm1 + - first: + 4: 400002 + second: ArmPlacement_Left + - first: + 4: 400004 + second: ArmPlacement_Right + - first: + 4: 400006 + second: ArmPlacement_Upper + - first: + 4: 400008 + second: ArmPosition_Left + - first: + 4: 400010 + second: ArmPosition_Right + - first: + 4: 400012 + second: AssaultRifle + - first: + 4: 400014 + second: BackPack + - first: + 4: 400016 + second: Backpack1 + - first: + 4: 400018 + second: Body1 + - first: + 4: 400020 + second: Chest + - first: + 4: 400022 + second: Foot_Left + - first: + 4: 400024 + second: Foot_Right + - first: + 4: 400026 + second: Hand_Left + - first: + 4: 400028 + second: Hand_Right + - first: + 4: 400030 + second: Head + - first: + 4: 400032 + second: head1 + - first: + 4: 400034 + second: Headgear_Left + - first: + 4: 400036 + second: Headgear_Right + - first: + 4: 400038 + second: Hips + - first: + 4: 400040 + second: //RootNode + - first: + 4: 400042 + second: Index_Distal_Left + - first: + 4: 400044 + second: Index_Distal_Right + - first: + 4: 400046 + second: Index_Intermediate_Left + - first: + 4: 400048 + second: Index_Intermediate_Right + - first: + 4: 400050 + second: Index_Proximal_Left + - first: + 4: 400052 + second: 
Index_Proximal_Right + - first: + 4: 400054 + second: Leg1 + - first: + 4: 400056 + second: LowerArm_Left + - first: + 4: 400058 + second: LowerArm_Right + - first: + 4: 400060 + second: LowerLeg_Left + - first: + 4: 400062 + second: LowerLeg_Right + - first: + 4: 400064 + second: magazine_Right + - first: + 4: 400066 + second: Neck + - first: + 4: 400068 + second: RestOfFingers_Distal_Left + - first: + 4: 400070 + second: RestOfFingers_Distal_Right + - first: + 4: 400072 + second: RestOfFingers_Intermediate_Left + - first: + 4: 400074 + second: RestOfFingers_Intermediate_Right + - first: + 4: 400076 + second: RestOfFingers_Proximal_Left + - first: + 4: 400078 + second: RestOfFingers_Proximal_Right + - first: + 4: 400080 + second: Shoulder_Left + - first: + 4: 400082 + second: Shoulder_Right + - first: + 4: 400084 + second: ShoulderPadBlade_Left + - first: + 4: 400086 + second: ShoulderPadBlade_Right + - first: + 4: 400088 + second: ShoulderPadBody_Left + - first: + 4: 400090 + second: ShoulderPadBody_Right + - first: + 4: 400092 + second: ShoulderPadCTRL_Left + - first: + 4: 400094 + second: ShoulderPadCTRL_Right + - first: + 4: 400096 + second: Spine + - first: + 4: 400098 + second: Thumb_Distal_Left + - first: + 4: 400100 + second: Thumb_Distal_Right + - first: + 4: 400102 + second: Thumb_Intermediate_Left + - first: + 4: 400104 + second: Thumb_Intermediate_Right + - first: + 4: 400106 + second: Thumb_Proximal_Left + - first: + 4: 400108 + second: Thumb_Proximal_Right + - first: + 4: 400110 + second: Toe_Left + - first: + 4: 400112 + second: Toe_Right + - first: + 4: 400114 + second: Toetip_Left + - first: + 4: 400116 + second: Toetip_Right + - first: + 4: 400118 + second: Trigger_Right + - first: + 4: 400120 + second: UpperArm_Left + - first: + 4: 400122 + second: UpperArm_Right + - first: + 4: 400124 + second: UpperLeg_Left + - first: + 4: 400126 + second: UpperLeg_Right + - first: + 21: 2100000 + second: w_h_01 + - first: + 21: 2100002 + second: w_b_01 + - first: + 21: 2100004 + second: w_bp_01 + - first: + 21: 2100006 + second: w_a_01 + - first: + 21: 2100008 + second: w_f_01 + - first: + 21: 2100010 + second: w_w_01 + - first: + 43: 4300000 + second: head1 + - first: + 43: 4300002 + second: Body1 + - first: + 43: 4300004 + second: Backpack1 + - first: + 43: 4300006 + second: Arm1 + - first: + 43: 4300008 + second: Leg1 + - first: + 43: 4300010 + second: AssaultRifle + - first: + 74: 7400000 + second: Idle_Ducking_AR + - first: + 95: 9500000 + second: //RootNode + - first: + 137: 13700000 + second: Arm1 + - first: + 137: 13700002 + second: AssaultRifle + - first: + 137: 13700004 + second: Backpack1 + - first: + 137: 13700006 + second: Body1 + - first: + 137: 13700008 + second: head1 + - first: + 137: 13700010 + second: Leg1 + - first: + 41386430: 2186277476908879412 + second: ImportLogs externalObjects: {} materials: - importMaterials: 1 + materialImportMode: 1 materialName: 0 materialSearch: 1 materialLocation: 1 @@ -183,6 +481,7 @@ ModelImporter: - serializedVersion: 16 name: Idle_Ducking_AR takeName: Take 001 + internalID: 0 firstFrame: 0 lastFrame: 30 wrapMode: 0 @@ -191,12 +490,12 @@ ModelImporter: cycleOffset: 0 loop: 0 hasAdditiveReferencePose: 0 - loopTime: 1 + loopTime: 0 loopBlend: 0 loopBlendOrientation: 1 loopBlendPositionY: 1 loopBlendPositionXZ: 1 - keepOriginalOrientation: 1 + keepOriginalOrientation: 0 keepOriginalPositionY: 1 keepOriginalPositionXZ: 1 heightFromFeet: 0 @@ -343,25 +642,28 @@ ModelImporter: meshCompression: 0 addColliders: 0 useSRGBMaterialColor: 1 + 
sortHierarchyByName: 1 importVisibility: 1 importBlendShapes: 1 importCameras: 1 importLights: 1 + fileIdsGeneration: 1 swapUVChannels: 0 generateSecondaryUV: 0 useFileUnits: 1 - optimizeMeshForGPU: 1 keepQuads: 0 weldVertices: 1 preserveHierarchy: 0 + skinWeightsMode: 0 + maxBonesPerVertex: 4 + minBoneWeight: 0.001 + meshOptimizationFlags: -1 indexFormat: 0 secondaryUVAngleDistortion: 8 secondaryUVAreaDistortion: 15.000001 secondaryUVHardAngle: 88 secondaryUVPackMargin: 4 useFileScale: 1 - previousCalculatedGlobalScale: 1 - hasPreviousCalculatedGlobalScale: 1 tangentSpace: normalSmoothAngle: 60 normalImportMode: 0 @@ -370,10 +672,10 @@ ModelImporter: legacyComputeAllNormalsFromSmoothingGroupsWhenMeshHasBlendShapes: 0 blendShapeNormalImportMode: 1 normalSmoothingSource: 0 + referencedClips: [] importAnimation: 1 - copyAvatar: 0 humanDescription: - serializedVersion: 2 + serializedVersion: 3 human: - boneName: Hips humanName: Hips @@ -1015,13 +1317,16 @@ ModelImporter: armStretch: 0.05 legStretch: 0.05 feetSpacing: 0 + globalScale: 1 rootMotionBoneName: hasTranslationDoF: 0 hasExtraRoot: 1 skeletonHasParents: 1 lastHumanDescriptionAvatarSource: {instanceID: 0} + autoGenerateAvatarMappingIfUnspecified: 1 animationType: 3 humanoidOversampling: 1 + avatarSetup: 1 additionalBone: 0 userData: assetBundleName: diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_Shoot_ar.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_guard_ar.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta old mode 100755 new mode 100644 similarity index 79% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta index b7fa658..61e2bcd --- 
a/Assets/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta +++ b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Idle_gunMiddle_ar.fbx.meta @@ -1,160 +1,458 @@ fileFormatVersion: 2 guid: 107649059ea401b4e9c5c20f21e99a55 ModelImporter: - serializedVersion: 23 - fileIDToRecycleName: - 100000: Arm1 - 100002: ArmPlacement_Left - 100004: ArmPlacement_Right - 100006: ArmPlacement_Upper - 100008: ArmPosition_Left - 100010: ArmPosition_Right - 100012: AssaultRifle - 100014: BackPack - 100016: Backpack1 - 100018: Body1 - 100020: Chest - 100022: Foot_Left - 100024: Foot_Right - 100026: Hand_Left - 100028: Hand_Right - 100030: Head - 100032: head1 - 100034: Headgear_Left - 100036: Headgear_Right - 100038: Hips - 100040: //RootNode - 100042: Index_Distal_Left - 100044: Index_Distal_Right - 100046: Index_Intermediate_Left - 100048: Index_Intermediate_Right - 100050: Index_Proximal_Left - 100052: Index_Proximal_Right - 100054: Leg1 - 100056: LowerArm_Left - 100058: LowerArm_Right - 100060: LowerLeg_Left - 100062: LowerLeg_Right - 100064: magazine_Right - 100066: Neck - 100068: RestOfFingers_Distal_Left - 100070: RestOfFingers_Distal_Right - 100072: RestOfFingers_Intermediate_Left - 100074: RestOfFingers_Intermediate_Right - 100076: RestOfFingers_Proximal_Left - 100078: RestOfFingers_Proximal_Right - 100080: Shoulder_Left - 100082: Shoulder_Right - 100084: ShoulderPadBlade_Left - 100086: ShoulderPadBlade_Right - 100088: ShoulderPadBody_Left - 100090: ShoulderPadBody_Right - 100092: ShoulderPadCTRL_Left - 100094: ShoulderPadCTRL_Right - 100096: Spine - 100098: Thumb_Distal_Left - 100100: Thumb_Distal_Right - 100102: Thumb_Intermediate_Left - 100104: Thumb_Intermediate_Right - 100106: Thumb_Proximal_Left - 100108: Thumb_Proximal_Right - 100110: Toe_Left - 100112: Toe_Right - 100114: Toetip_Left - 100116: Toetip_Right - 100118: Trigger_Right - 100120: UpperArm_Left - 100122: UpperArm_Right - 100124: UpperLeg_Left - 100126: UpperLeg_Right - 400000: Arm1 - 400002: ArmPlacement_Left - 400004: ArmPlacement_Right - 400006: ArmPlacement_Upper - 400008: ArmPosition_Left - 400010: ArmPosition_Right - 400012: AssaultRifle - 400014: BackPack - 400016: Backpack1 - 400018: Body1 - 400020: Chest - 400022: Foot_Left - 400024: Foot_Right - 400026: Hand_Left - 400028: Hand_Right - 400030: Head - 400032: head1 - 400034: Headgear_Left - 400036: Headgear_Right - 400038: Hips - 400040: //RootNode - 400042: Index_Distal_Left - 400044: Index_Distal_Right - 400046: Index_Intermediate_Left - 400048: Index_Intermediate_Right - 400050: Index_Proximal_Left - 400052: Index_Proximal_Right - 400054: Leg1 - 400056: LowerArm_Left - 400058: LowerArm_Right - 400060: LowerLeg_Left - 400062: LowerLeg_Right - 400064: magazine_Right - 400066: Neck - 400068: RestOfFingers_Distal_Left - 400070: RestOfFingers_Distal_Right - 400072: RestOfFingers_Intermediate_Left - 400074: RestOfFingers_Intermediate_Right - 400076: RestOfFingers_Proximal_Left - 400078: RestOfFingers_Proximal_Right - 400080: Shoulder_Left - 400082: Shoulder_Right - 400084: ShoulderPadBlade_Left - 400086: ShoulderPadBlade_Right - 400088: ShoulderPadBody_Left - 400090: ShoulderPadBody_Right - 400092: ShoulderPadCTRL_Left - 400094: ShoulderPadCTRL_Right - 400096: Spine - 400098: Thumb_Distal_Left - 400100: Thumb_Distal_Right - 400102: Thumb_Intermediate_Left - 400104: Thumb_Intermediate_Right - 400106: Thumb_Proximal_Left - 400108: Thumb_Proximal_Right - 400110: Toe_Left - 400112: Toe_Right - 400114: Toetip_Left - 400116: Toetip_Right - 400118: Trigger_Right - 400120: 
UpperArm_Left - 400122: UpperArm_Right - 400124: UpperLeg_Left - 400126: UpperLeg_Right - 2100000: w_h_01 - 2100002: w_b_01 - 2100004: w_bp_01 - 2100006: w_a_01 - 2100008: w_f_01 - 2100010: w_w_01 - 4300000: head1 - 4300002: Body1 - 4300004: Backpack1 - 4300006: Arm1 - 4300008: Leg1 - 4300010: AssaultRifle - 7400000: Idle_gunMiddle_AR - 9500000: //RootNode - 13700000: Arm1 - 13700002: AssaultRifle - 13700004: Backpack1 - 13700006: Body1 - 13700008: head1 - 13700010: Leg1 - 2186277476908879412: ImportLogs + serializedVersion: 19301 + internalIDToNameTable: + - first: + 1: 100000 + second: Arm1 + - first: + 1: 100002 + second: ArmPlacement_Left + - first: + 1: 100004 + second: ArmPlacement_Right + - first: + 1: 100006 + second: ArmPlacement_Upper + - first: + 1: 100008 + second: ArmPosition_Left + - first: + 1: 100010 + second: ArmPosition_Right + - first: + 1: 100012 + second: AssaultRifle + - first: + 1: 100014 + second: BackPack + - first: + 1: 100016 + second: Backpack1 + - first: + 1: 100018 + second: Body1 + - first: + 1: 100020 + second: Chest + - first: + 1: 100022 + second: Foot_Left + - first: + 1: 100024 + second: Foot_Right + - first: + 1: 100026 + second: Hand_Left + - first: + 1: 100028 + second: Hand_Right + - first: + 1: 100030 + second: Head + - first: + 1: 100032 + second: head1 + - first: + 1: 100034 + second: Headgear_Left + - first: + 1: 100036 + second: Headgear_Right + - first: + 1: 100038 + second: Hips + - first: + 1: 100040 + second: //RootNode + - first: + 1: 100042 + second: Index_Distal_Left + - first: + 1: 100044 + second: Index_Distal_Right + - first: + 1: 100046 + second: Index_Intermediate_Left + - first: + 1: 100048 + second: Index_Intermediate_Right + - first: + 1: 100050 + second: Index_Proximal_Left + - first: + 1: 100052 + second: Index_Proximal_Right + - first: + 1: 100054 + second: Leg1 + - first: + 1: 100056 + second: LowerArm_Left + - first: + 1: 100058 + second: LowerArm_Right + - first: + 1: 100060 + second: LowerLeg_Left + - first: + 1: 100062 + second: LowerLeg_Right + - first: + 1: 100064 + second: magazine_Right + - first: + 1: 100066 + second: Neck + - first: + 1: 100068 + second: RestOfFingers_Distal_Left + - first: + 1: 100070 + second: RestOfFingers_Distal_Right + - first: + 1: 100072 + second: RestOfFingers_Intermediate_Left + - first: + 1: 100074 + second: RestOfFingers_Intermediate_Right + - first: + 1: 100076 + second: RestOfFingers_Proximal_Left + - first: + 1: 100078 + second: RestOfFingers_Proximal_Right + - first: + 1: 100080 + second: Shoulder_Left + - first: + 1: 100082 + second: Shoulder_Right + - first: + 1: 100084 + second: ShoulderPadBlade_Left + - first: + 1: 100086 + second: ShoulderPadBlade_Right + - first: + 1: 100088 + second: ShoulderPadBody_Left + - first: + 1: 100090 + second: ShoulderPadBody_Right + - first: + 1: 100092 + second: ShoulderPadCTRL_Left + - first: + 1: 100094 + second: ShoulderPadCTRL_Right + - first: + 1: 100096 + second: Spine + - first: + 1: 100098 + second: Thumb_Distal_Left + - first: + 1: 100100 + second: Thumb_Distal_Right + - first: + 1: 100102 + second: Thumb_Intermediate_Left + - first: + 1: 100104 + second: Thumb_Intermediate_Right + - first: + 1: 100106 + second: Thumb_Proximal_Left + - first: + 1: 100108 + second: Thumb_Proximal_Right + - first: + 1: 100110 + second: Toe_Left + - first: + 1: 100112 + second: Toe_Right + - first: + 1: 100114 + second: Toetip_Left + - first: + 1: 100116 + second: Toetip_Right + - first: + 1: 100118 + second: Trigger_Right + - first: + 1: 100120 + second: 
UpperArm_Left + - first: + 1: 100122 + second: UpperArm_Right + - first: + 1: 100124 + second: UpperLeg_Left + - first: + 1: 100126 + second: UpperLeg_Right + - first: + 4: 400000 + second: Arm1 + - first: + 4: 400002 + second: ArmPlacement_Left + - first: + 4: 400004 + second: ArmPlacement_Right + - first: + 4: 400006 + second: ArmPlacement_Upper + - first: + 4: 400008 + second: ArmPosition_Left + - first: + 4: 400010 + second: ArmPosition_Right + - first: + 4: 400012 + second: AssaultRifle + - first: + 4: 400014 + second: BackPack + - first: + 4: 400016 + second: Backpack1 + - first: + 4: 400018 + second: Body1 + - first: + 4: 400020 + second: Chest + - first: + 4: 400022 + second: Foot_Left + - first: + 4: 400024 + second: Foot_Right + - first: + 4: 400026 + second: Hand_Left + - first: + 4: 400028 + second: Hand_Right + - first: + 4: 400030 + second: Head + - first: + 4: 400032 + second: head1 + - first: + 4: 400034 + second: Headgear_Left + - first: + 4: 400036 + second: Headgear_Right + - first: + 4: 400038 + second: Hips + - first: + 4: 400040 + second: //RootNode + - first: + 4: 400042 + second: Index_Distal_Left + - first: + 4: 400044 + second: Index_Distal_Right + - first: + 4: 400046 + second: Index_Intermediate_Left + - first: + 4: 400048 + second: Index_Intermediate_Right + - first: + 4: 400050 + second: Index_Proximal_Left + - first: + 4: 400052 + second: Index_Proximal_Right + - first: + 4: 400054 + second: Leg1 + - first: + 4: 400056 + second: LowerArm_Left + - first: + 4: 400058 + second: LowerArm_Right + - first: + 4: 400060 + second: LowerLeg_Left + - first: + 4: 400062 + second: LowerLeg_Right + - first: + 4: 400064 + second: magazine_Right + - first: + 4: 400066 + second: Neck + - first: + 4: 400068 + second: RestOfFingers_Distal_Left + - first: + 4: 400070 + second: RestOfFingers_Distal_Right + - first: + 4: 400072 + second: RestOfFingers_Intermediate_Left + - first: + 4: 400074 + second: RestOfFingers_Intermediate_Right + - first: + 4: 400076 + second: RestOfFingers_Proximal_Left + - first: + 4: 400078 + second: RestOfFingers_Proximal_Right + - first: + 4: 400080 + second: Shoulder_Left + - first: + 4: 400082 + second: Shoulder_Right + - first: + 4: 400084 + second: ShoulderPadBlade_Left + - first: + 4: 400086 + second: ShoulderPadBlade_Right + - first: + 4: 400088 + second: ShoulderPadBody_Left + - first: + 4: 400090 + second: ShoulderPadBody_Right + - first: + 4: 400092 + second: ShoulderPadCTRL_Left + - first: + 4: 400094 + second: ShoulderPadCTRL_Right + - first: + 4: 400096 + second: Spine + - first: + 4: 400098 + second: Thumb_Distal_Left + - first: + 4: 400100 + second: Thumb_Distal_Right + - first: + 4: 400102 + second: Thumb_Intermediate_Left + - first: + 4: 400104 + second: Thumb_Intermediate_Right + - first: + 4: 400106 + second: Thumb_Proximal_Left + - first: + 4: 400108 + second: Thumb_Proximal_Right + - first: + 4: 400110 + second: Toe_Left + - first: + 4: 400112 + second: Toe_Right + - first: + 4: 400114 + second: Toetip_Left + - first: + 4: 400116 + second: Toetip_Right + - first: + 4: 400118 + second: Trigger_Right + - first: + 4: 400120 + second: UpperArm_Left + - first: + 4: 400122 + second: UpperArm_Right + - first: + 4: 400124 + second: UpperLeg_Left + - first: + 4: 400126 + second: UpperLeg_Right + - first: + 21: 2100000 + second: w_h_01 + - first: + 21: 2100002 + second: w_b_01 + - first: + 21: 2100004 + second: w_bp_01 + - first: + 21: 2100006 + second: w_a_01 + - first: + 21: 2100008 + second: w_f_01 + - first: + 21: 2100010 + second: w_w_01 
+ - first: + 43: 4300000 + second: head1 + - first: + 43: 4300002 + second: Body1 + - first: + 43: 4300004 + second: Backpack1 + - first: + 43: 4300006 + second: Arm1 + - first: + 43: 4300008 + second: Leg1 + - first: + 43: 4300010 + second: AssaultRifle + - first: + 74: 7400000 + second: Idle_gunMiddle_AR + - first: + 95: 9500000 + second: //RootNode + - first: + 137: 13700000 + second: Arm1 + - first: + 137: 13700002 + second: AssaultRifle + - first: + 137: 13700004 + second: Backpack1 + - first: + 137: 13700006 + second: Body1 + - first: + 137: 13700008 + second: head1 + - first: + 137: 13700010 + second: Leg1 + - first: + 41386430: 2186277476908879412 + second: ImportLogs externalObjects: {} materials: - importMaterials: 1 + materialImportMode: 1 materialName: 0 materialSearch: 1 materialLocation: 1 @@ -183,6 +481,7 @@ ModelImporter: - serializedVersion: 16 name: Idle_gunMiddle_AR takeName: Take 001 + internalID: 0 firstFrame: 0 lastFrame: 40 wrapMode: 0 @@ -343,25 +642,28 @@ ModelImporter: meshCompression: 0 addColliders: 0 useSRGBMaterialColor: 1 + sortHierarchyByName: 1 importVisibility: 1 importBlendShapes: 1 importCameras: 1 importLights: 1 + fileIdsGeneration: 1 swapUVChannels: 0 generateSecondaryUV: 0 useFileUnits: 1 - optimizeMeshForGPU: 1 keepQuads: 0 weldVertices: 1 preserveHierarchy: 0 + skinWeightsMode: 0 + maxBonesPerVertex: 4 + minBoneWeight: 0.001 + meshOptimizationFlags: -1 indexFormat: 0 secondaryUVAngleDistortion: 8 secondaryUVAreaDistortion: 15.000001 secondaryUVHardAngle: 88 secondaryUVPackMargin: 4 useFileScale: 1 - previousCalculatedGlobalScale: 1 - hasPreviousCalculatedGlobalScale: 1 tangentSpace: normalSmoothAngle: 60 normalImportMode: 0 @@ -370,10 +672,10 @@ ModelImporter: legacyComputeAllNormalsFromSmoothingGroupsWhenMeshHasBlendShapes: 0 blendShapeNormalImportMode: 1 normalSmoothingSource: 0 + referencedClips: [] importAnimation: 1 - copyAvatar: 0 humanDescription: - serializedVersion: 2 + serializedVersion: 3 human: - boneName: Hips humanName: Hips @@ -1015,13 +1317,16 @@ ModelImporter: armStretch: 0.05 legStretch: 0.05 feetSpacing: 0 + globalScale: 1 rootMotionBoneName: hasTranslationDoF: 0 hasExtraRoot: 1 skeletonHasParents: 1 lastHumanDescriptionAvatarSource: {instanceID: 0} + autoGenerateAvatarMappingIfUnspecified: 1 animationType: 3 humanoidOversampling: 1 + avatarSetup: 1 additionalBone: 0 userData: assetBundleName: diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Jump.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from 
Assets/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Reload.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_guard_AR.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Run_gunMiddle_AR.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_AutoShot_AR.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_BurstShot_AR.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx.meta 
b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/Shoot_SingleShot_AR.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkBack_Shoot_ar.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkFront_Shoot_ar.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkLeft_Shoot_ar.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animations/WalkRight_Shoot_ar.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animators.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animators.meta rename to 
Assets/Arts/SciFiWarriorPBRHPPolyart/Animators.meta diff --git a/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller new file mode 100644 index 0000000..01ed567 --- /dev/null +++ b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller @@ -0,0 +1,93 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!206 &-2926378548289824311 +BlendTree: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Blend Tree + m_Childs: [] + m_BlendParameter: Blend + m_BlendParameterY: Blend + m_MinThreshold: 0 + m_MaxThreshold: 1 + m_UseAutomaticThresholds: 1 + m_NormalizedBlendValues: 0 + m_BlendType: 0 +--- !u!91 &9100000 +AnimatorController: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: SciFiWarrior + serializedVersion: 5 + m_AnimatorParameters: + - m_Name: Blend + m_Type: 1 + m_DefaultFloat: 0 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 0} + m_AnimatorLayers: + - serializedVersion: 5 + m_Name: Base Layer + m_StateMachine: {fileID: 1107476691846109156} + m_Mask: {fileID: 0} + m_Motions: [] + m_Behaviours: [] + m_BlendingMode: 0 + m_SyncedLayerIndex: -1 + m_DefaultWeight: 0 + m_IKPass: 0 + m_SyncedLayerAffectsTiming: 0 + m_Controller: {fileID: 9100000} +--- !u!1107 &1107476691846109156 +AnimatorStateMachine: + serializedVersion: 6 + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Base Layer + m_ChildStates: + - serializedVersion: 1 + m_State: {fileID: 5405818589905925533} + m_Position: {x: 353.98297, y: 140.66443, z: 0} + m_ChildStateMachines: [] + m_AnyStateTransitions: [] + m_EntryTransitions: [] + m_StateMachineTransitions: {} + m_StateMachineBehaviours: [] + m_AnyStatePosition: {x: 50, y: 20, z: 0} + m_EntryPosition: {x: 50, y: 120, z: 0} + m_ExitPosition: {x: 48, y: 72, z: 0} + m_ParentStateMachinePosition: {x: 800, y: 20, z: 0} + m_DefaultState: {fileID: 5405818589905925533} +--- !u!1102 &5405818589905925533 +AnimatorState: + serializedVersion: 6 + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Blend Tree + m_Speed: 1 + m_CycleOffset: 0 + m_Transitions: [] + m_StateMachineBehaviours: [] + m_Position: {x: 50, y: 50, z: 0} + m_IKOnFeet: 0 + m_WriteDefaultValues: 1 + m_Mirror: 0 + m_SpeedParameterActive: 0 + m_MirrorParameterActive: 0 + m_CycleOffsetParameterActive: 0 + m_TimeParameterActive: 0 + m_Motion: {fileID: 7400000, guid: 107649059ea401b4e9c5c20f21e99a55, type: 3} + m_Tag: + m_SpeedParameter: + m_MirrorParameter: + m_CycleOffsetParameter: + m_TimeParameter: diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller rename to 
Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Animators/SciFiWarriorOur.controller.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/HP.mat b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/HP.mat old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/HP.mat rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/HP.mat diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/HP.mat.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/HP.mat.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/HP.mat.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/HP.mat.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/PBR.mat b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/PBR.mat old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/PBR.mat rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/PBR.mat diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/PBR.mat.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/PBR.mat.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/PBR.mat.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/PBR.mat.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Polyart.mat.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat old mode 100755 new mode 100644 similarity index 96% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat index a1934e5..ba73d5d --- a/Assets/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat +++ b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat @@ -1,89 +1,89 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!21 &2100000 -Material: - serializedVersion: 6 - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_Name: Skybox_Mat - m_Shader: {fileID: 106, guid: 0000000000000000f000000000000000, type: 0} - m_ShaderKeywords: _METALLIC_SETUP _SUNDISK_HIGH_QUALITY - m_LightmapFlags: 4 - m_EnableInstancingVariants: 0 - m_DoubleSidedGI: 0 - m_CustomRenderQueue: -1 - stringTagMap: {} - disabledShaderPasses: [] - m_SavedProperties: - serializedVersion: 3 - m_TexEnvs: - - _BumpMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _DetailAlbedoMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _DetailMask: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _DetailNormalMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _EmissionMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _MainTex: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _MetallicGlossMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _OcclusionMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _ParallaxMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _SpecGlossMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - m_Floats: - - _AtmosphereThickness: 0.53 - - _BumpScale: 1 - - _Cutoff: 0.5 - - _DetailNormalMapScale: 1 - - _DstBlend: 0 - - _Exposure: 1.25 - - _GlossMapScale: 1 - - _Glossiness: 0.5 - - _GlossyReflections: 1 - - _Metallic: 0 - - _Mode: 0 - - _OcclusionStrength: 1 - - _Parallax: 0.02 - - _SmoothnessTextureChannel: 0 - - _SpecularHighlights: 1 - - _SrcBlend: 1 - - _SunDisk: 2 - - _SunSize: 0.04 - - _SunSizeConvergence: 5 - - _UVSec: 0 - - _WorkflowMode: 1 - - _ZWrite: 1 - m_Colors: - - _Color: {r: 1, g: 1, b: 1, a: 1} - - _EmissionColor: {r: 0, g: 0, b: 0, a: 1} - - _GroundColor: {r: 0.6392157, g: 0.6901961, b: 0.7411765, a: 1} - - _SkyTint: {r: 1, g: 1, b: 1, a: 1} - - _SpecColor: {r: 0.19999996, g: 0.19999996, b: 0.19999996, a: 1} +%YAML 1.1 +%TAG !u! 
tag:unity3d.com,2011: +--- !u!21 &2100000 +Material: + serializedVersion: 6 + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 0} + m_Name: Skybox_Mat + m_Shader: {fileID: 106, guid: 0000000000000000f000000000000000, type: 0} + m_ShaderKeywords: _METALLIC_SETUP _SUNDISK_HIGH_QUALITY + m_LightmapFlags: 4 + m_EnableInstancingVariants: 0 + m_DoubleSidedGI: 0 + m_CustomRenderQueue: -1 + stringTagMap: {} + disabledShaderPasses: [] + m_SavedProperties: + serializedVersion: 3 + m_TexEnvs: + - _BumpMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailAlbedoMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailMask: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailNormalMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _EmissionMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _MainTex: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _MetallicGlossMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _OcclusionMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _ParallaxMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _SpecGlossMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + m_Floats: + - _AtmosphereThickness: 0.53 + - _BumpScale: 1 + - _Cutoff: 0.5 + - _DetailNormalMapScale: 1 + - _DstBlend: 0 + - _Exposure: 1.25 + - _GlossMapScale: 1 + - _Glossiness: 0.5 + - _GlossyReflections: 1 + - _Metallic: 0 + - _Mode: 0 + - _OcclusionStrength: 1 + - _Parallax: 0.02 + - _SmoothnessTextureChannel: 0 + - _SpecularHighlights: 1 + - _SrcBlend: 1 + - _SunDisk: 2 + - _SunSize: 0.04 + - _SunSizeConvergence: 5 + - _UVSec: 0 + - _WorkflowMode: 1 + - _ZWrite: 1 + m_Colors: + - _Color: {r: 1, g: 1, b: 1, a: 1} + - _EmissionColor: {r: 0, g: 0, b: 0, a: 1} + - _GroundColor: {r: 0.6392157, g: 0.6901961, b: 0.7411765, a: 1} + - _SkyTint: {r: 1, g: 1, b: 1, a: 1} + - _SpecColor: {r: 0.19999996, g: 0.19999996, b: 0.19999996, a: 1} diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Skybox_Mat.mat.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/Stage.mat b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Stage.mat old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/Stage.mat rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Stage.mat diff --git a/Assets/SciFiWarriorPBRHPPolyart/Materials/Stage.mat.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Stage.mat.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Materials/Stage.mat.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Materials/Stage.mat.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx 
b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/PBR_HP_Mesh.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Polyart_Mesh.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx diff --git a/Assets/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Mesh/Stage.fbx.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Prefabs.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab diff --git a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/HPCharacter.prefab.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab diff --git a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PBRCharacter.prefab.meta diff --git 
a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab old mode 100755 new mode 100644 similarity index 96% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab index 67bf0a4..56bc88c --- a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab +++ b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab @@ -1715,6 +1715,10 @@ GameObject: m_Component: - component: {fileID: 7072444061591351794} - component: {fileID: 7072444061582351952} + - component: {fileID: 6381261399840543104} + - component: {fileID: 2971219656579870553} + - component: {fileID: 5912052353413311719} + - component: {fileID: 8293113947138725737} m_Layer: 0 m_Name: PolyartCharacter m_TagString: Untagged @@ -1762,6 +1766,104 @@ Animator: m_HasTransformHierarchy: 1 m_AllowConstantClipSamplingOptimization: 1 m_KeepAnimatorControllerStateOnDisable: 0 +--- !u!136 &6381261399840543104 +CapsuleCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444061591711186} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + m_Radius: 0.5 + m_Height: 2.1 + m_Direction: 1 + m_Center: {x: 0, y: 1.06, z: 0} +--- !u!114 &2971219656579870553 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444061591711186} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} + m_Name: + m_EditorClassIdentifier: + input_View: {x: 0, y: 0} + cameraHolder: {fileID: 7072444061591351696} + feetTransform: {fileID: 7072444061591351794} + playerSettings: + ViewXSensetivity: 0 + ViewYSensetivity: 0 + ViewXInverted: 0 + ViewYInverted: 0 + SprintingHold: 0 + MovementSmoothing: 0 + RunningForwardSpeed: 0 + RunningStrafeSpeed: 0 + WalkingForwardSpeed: 0 + WalkingBackwardSpeed: 0 + WalkingStrafeSpeed: 0 + JumpingHeight: 0 + JumpingFalloff: 0 + FallingSmoothing: 0 + SpeedEffector: 1 + CrouchSpeedEffector: 0 + ProneSpeedEffector: 0 + FallingSpeedEffector: 0 + ViewClampYMin: -70 + ViewClampYMax: 80 + playerMask: + serializedVersion: 2 + m_Bits: 0 + gravityAmount: 9.81 + gravityMin: 0 + jumpingForce: {x: 0, y: 30, z: 0} + playerStance: 0 + playerStanceSmoothing: 0 + playerStandStance: + CameraHeight: 0 + StanceCollider: {fileID: 6381261399840543104} + playerCrouchStance: + CameraHeight: 0 + StanceCollider: {fileID: 0} + playerProneStance: + CameraHeight: 0 + StanceCollider: {fileID: 0} + currentWeapon: {fileID: 0} +--- !u!114 &5912052353413311719 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444061591711186} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 907ff02de47a55a4e971d73d25e7d006, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!143 &8293113947138725737 +CharacterController: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444061591711186} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Height: 2.1 + m_Radius: 0.5 + m_SlopeLimit: 45 + m_StepOffset: 0.3 + m_SkinWidth: 0.08 
+ m_MinMoveDistance: 0.001 + m_Center: {x: 0, y: 1.02, z: 0} --- !u!1 &7072444061591711188 GameObject: m_ObjectHideFlags: 0 diff --git a/Assets/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Prefabs/PolyartCharacter.prefab.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Scene.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Scene.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Scene.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Scene.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity b/Assets/Arts/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity diff --git a/Assets/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Scene/DemoScene01.unity.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/HP_Albedo.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Albedo.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png.meta old mode 100755 new 
mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_AO.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_EM.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_MS.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PBR_Free_NM.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/Polyart.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/Polyart.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/Polyart.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/Polyart.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/Polyart.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/Polyart.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/Polyart.png.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/Polyart.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png rename to Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png diff --git a/Assets/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png.meta rename to 
Assets/Arts/SciFiWarriorPBRHPPolyart/Textures/PolyartEmission.png.meta diff --git a/Assets/SciFiWarriorPBRHPPolyart/promotion.jpg b/Assets/Arts/SciFiWarriorPBRHPPolyart/promotion.jpg old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/promotion.jpg rename to Assets/Arts/SciFiWarriorPBRHPPolyart/promotion.jpg diff --git a/Assets/SciFiWarriorPBRHPPolyart/promotion.jpg.meta b/Assets/Arts/SciFiWarriorPBRHPPolyart/promotion.jpg.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/SciFiWarriorPBRHPPolyart/promotion.jpg.meta rename to Assets/Arts/SciFiWarriorPBRHPPolyart/promotion.jpg.meta diff --git a/Assets/Defaults/mat_proBuilder.mat b/Assets/Defaults/mat_proBuilder.mat deleted file mode 100755 index 0e86a32..0000000 --- a/Assets/Defaults/mat_proBuilder.mat +++ /dev/null @@ -1,77 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!21 &2100000 -Material: - serializedVersion: 6 - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: mat_proBuilder - m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0} - m_ShaderKeywords: - m_LightmapFlags: 4 - m_EnableInstancingVariants: 0 - m_DoubleSidedGI: 0 - m_CustomRenderQueue: -1 - stringTagMap: {} - disabledShaderPasses: [] - m_SavedProperties: - serializedVersion: 3 - m_TexEnvs: - - _BumpMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _DetailAlbedoMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _DetailMask: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _DetailNormalMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _EmissionMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _MainTex: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _MetallicGlossMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _OcclusionMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - - _ParallaxMap: - m_Texture: {fileID: 0} - m_Scale: {x: 1, y: 1} - m_Offset: {x: 0, y: 0} - m_Floats: - - _BumpScale: 1 - - _Cutoff: 0.5 - - _DetailNormalMapScale: 1 - - _DstBlend: 0 - - _GlossMapScale: 1 - - _Glossiness: 0.5 - - _GlossyReflections: 1 - - _Metallic: 0 - - _Mode: 0 - - _OcclusionStrength: 1 - - _Parallax: 0.02 - - _SmoothnessTextureChannel: 0 - - _SpecularHighlights: 1 - - _SrcBlend: 1 - - _UVSec: 0 - - _ZWrite: 1 - m_Colors: - - _Color: {r: 0.735849, g: 0.13536847, b: 0.13536847, a: 1} - - _EmissionColor: {r: 0, g: 0, b: 0, a: 1} diff --git a/Assets/Inputs/InputSystem.inputsettings.asset b/Assets/Inputs/InputSystem.inputsettings.asset new file mode 100644 index 0000000..8a242b0 --- /dev/null +++ b/Assets/Inputs/InputSystem.inputsettings.asset @@ -0,0 +1,35 @@ +%YAML 1.1 +%TAG !u! 
tag:unity3d.com,2011: +--- !u!114 &11400000 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 0} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: c46f07b5ed07e4e92aa78254188d3d10, type: 3} + m_Name: InputSystem.inputsettings + m_EditorClassIdentifier: + m_SupportedDevices: [] + m_UpdateMode: 1 + m_MaxEventBytesPerUpdate: 5242880 + m_MaxQueuedEventsPerUpdate: 1000 + m_CompensateForScreenOrientation: 1 + m_BackgroundBehavior: 0 + m_EditorInputBehaviorInPlayMode: 0 + m_DefaultDeadzoneMin: 0.125 + m_DefaultDeadzoneMax: 0.925 + m_DefaultButtonPressPoint: 0.5 + m_ButtonReleaseThreshold: 0.75 + m_DefaultTapTime: 0.2 + m_DefaultSlowTapTime: 0.5 + m_DefaultHoldTime: 0.4 + m_TapRadius: 5 + m_MultiTapDelayTime: 0.75 + m_DisableRedundantEventsMerging: 0 + m_iOSSettings: + m_MotionUsage: + m_Enabled: 0 + m_Description: diff --git a/Assets/Inputs/InputSystem.inputsettings.asset.meta b/Assets/Inputs/InputSystem.inputsettings.asset.meta new file mode 100644 index 0000000..20c5cb0 --- /dev/null +++ b/Assets/Inputs/InputSystem.inputsettings.asset.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 4480bcbc35319b14588f3c0eb33e88c8 +NativeFormatImporter: + externalObjects: {} + mainObjectFileID: 11400000 + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Inputs/ThirdPersonViewInput.cs b/Assets/Inputs/ThirdPersonViewInput.cs new file mode 100644 index 0000000..89ed85f --- /dev/null +++ b/Assets/Inputs/ThirdPersonViewInput.cs @@ -0,0 +1,354 @@ +//------------------------------------------------------------------------------ +// +// This code was auto-generated by com.unity.inputsystem:InputActionCodeGenerator +// version 1.3.0 +// from Assets/Inputs/ThirdPersonViewInput.inputactions +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + +using System; +using System.Collections; +using System.Collections.Generic; +using UnityEngine.InputSystem; +using UnityEngine.InputSystem.Utilities; + +public partial class @ThirdPersonViewInput : IInputActionCollection2, IDisposable +{ + public InputActionAsset asset { get; } + public @ThirdPersonViewInput() + { + asset = InputActionAsset.FromJson(@"{ + ""name"": ""ThirdPersonViewInput"", + ""maps"": [ + { + ""name"": ""Player Movement"", + ""id"": ""50a259c9-d17f-4483-bd4b-feb4e290989d"", + ""actions"": [ + { + ""name"": ""Movement"", + ""type"": ""PassThrough"", + ""id"": ""be8e85db-0110-45cd-823f-1058ed7eb184"", + ""expectedControlType"": ""Vector2"", + ""processors"": """", + ""interactions"": """", + ""initialStateCheck"": false + }, + { + ""name"": ""Camera"", + ""type"": ""PassThrough"", + ""id"": ""f7ad7ce9-d43f-468d-ab0f-54cc66ac33fa"", + ""expectedControlType"": ""Vector2"", + ""processors"": """", + ""interactions"": """", + ""initialStateCheck"": false + } + ], + ""bindings"": [ + { + ""name"": ""WASD"", + ""id"": ""72e37b69-1a0d-4e6e-a5aa-7a2a62845d22"", + ""path"": ""2DVector(mode=2)"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Movement"", + ""isComposite"": true, + ""isPartOfComposite"": false + }, + { + ""name"": ""up"", + ""id"": ""a5e60c10-53e0-484a-be45-210fc2d3d305"", + ""path"": ""/w"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Movement"", + ""isComposite"": false, + ""isPartOfComposite"": true + }, + { + ""name"": ""down"", + ""id"": ""cdafb043-b1bc-4680-9d55-91854d1e2eb4"", + ""path"": ""/s"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Movement"", + ""isComposite"": false, + ""isPartOfComposite"": true + }, + { + ""name"": ""left"", + ""id"": ""4da8623e-6baa-4256-99ec-d016f9ad6c1f"", + ""path"": ""/a"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Movement"", + ""isComposite"": false, + ""isPartOfComposite"": true + }, + { + ""name"": ""right"", + ""id"": ""5c98d073-e749-49de-bb73-c6f5920f53f1"", + ""path"": ""/d"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Movement"", + ""isComposite"": false, + ""isPartOfComposite"": true + }, + { + ""name"": """", + ""id"": ""d8db2334-67bd-461e-92dc-dc04f2b64660"", + ""path"": ""/delta"", + ""interactions"": """", + ""processors"": ""NormalizeVector2"", + ""groups"": """", + ""action"": ""Camera"", + ""isComposite"": false, + ""isPartOfComposite"": false + } + ] + }, + { + ""name"": ""Player Actions"", + ""id"": ""7c28b681-44ce-4c44-832c-561051fceb49"", + ""actions"": [ + { + ""name"": ""Jump"", + ""type"": ""Button"", + ""id"": ""ef60df41-7d24-45f4-a426-4e6b9fe4d374"", + ""expectedControlType"": ""Button"", + ""processors"": """", + ""interactions"": """", + ""initialStateCheck"": false + }, + { + ""name"": ""Crouch"", + ""type"": ""Button"", + ""id"": ""19075beb-f2d7-4af5-bbcc-e2b53e1e139a"", + ""expectedControlType"": ""Button"", + ""processors"": """", + ""interactions"": """", + ""initialStateCheck"": false + }, + { + ""name"": ""Fire"", + ""type"": ""PassThrough"", + ""id"": ""2b493869-f7ab-4acb-8918-11b0265f0993"", + ""expectedControlType"": ""Button"", + ""processors"": """", + ""interactions"": """", + ""initialStateCheck"": false + } + ], + ""bindings"": [ + { + ""name"": """", + ""id"": 
""e8b4d9c1-7565-4954-b6f9-f9c7ed95f613"", + ""path"": ""/space"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Jump"", + ""isComposite"": false, + ""isPartOfComposite"": false + }, + { + ""name"": """", + ""id"": ""a02f0962-5e78-44c2-aaf3-ee059376bda6"", + ""path"": ""/c"", + ""interactions"": """", + ""processors"": """", + ""groups"": """", + ""action"": ""Crouch"", + ""isComposite"": false, + ""isPartOfComposite"": false + }, + { + ""name"": """", + ""id"": ""5caa47c3-a8d5-4aad-86fe-40cdca87210a"", + ""path"": ""/leftButton"", + ""interactions"": ""Hold(duration=0.1)"", + ""processors"": """", + ""groups"": """", + ""action"": ""Fire"", + ""isComposite"": false, + ""isPartOfComposite"": false + } + ] + } + ], + ""controlSchemes"": [] +}"); + // Player Movement + m_PlayerMovement = asset.FindActionMap("Player Movement", throwIfNotFound: true); + m_PlayerMovement_Movement = m_PlayerMovement.FindAction("Movement", throwIfNotFound: true); + m_PlayerMovement_Camera = m_PlayerMovement.FindAction("Camera", throwIfNotFound: true); + // Player Actions + m_PlayerActions = asset.FindActionMap("Player Actions", throwIfNotFound: true); + m_PlayerActions_Jump = m_PlayerActions.FindAction("Jump", throwIfNotFound: true); + m_PlayerActions_Crouch = m_PlayerActions.FindAction("Crouch", throwIfNotFound: true); + m_PlayerActions_Fire = m_PlayerActions.FindAction("Fire", throwIfNotFound: true); + } + + public void Dispose() + { + UnityEngine.Object.Destroy(asset); + } + + public InputBinding? bindingMask + { + get => asset.bindingMask; + set => asset.bindingMask = value; + } + + public ReadOnlyArray? devices + { + get => asset.devices; + set => asset.devices = value; + } + + public ReadOnlyArray controlSchemes => asset.controlSchemes; + + public bool Contains(InputAction action) + { + return asset.Contains(action); + } + + public IEnumerator GetEnumerator() + { + return asset.GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + public void Enable() + { + asset.Enable(); + } + + public void Disable() + { + asset.Disable(); + } + public IEnumerable bindings => asset.bindings; + + public InputAction FindAction(string actionNameOrId, bool throwIfNotFound = false) + { + return asset.FindAction(actionNameOrId, throwIfNotFound); + } + public int FindBinding(InputBinding bindingMask, out InputAction action) + { + return asset.FindBinding(bindingMask, out action); + } + + // Player Movement + private readonly InputActionMap m_PlayerMovement; + private IPlayerMovementActions m_PlayerMovementActionsCallbackInterface; + private readonly InputAction m_PlayerMovement_Movement; + private readonly InputAction m_PlayerMovement_Camera; + public struct PlayerMovementActions + { + private @ThirdPersonViewInput m_Wrapper; + public PlayerMovementActions(@ThirdPersonViewInput wrapper) { m_Wrapper = wrapper; } + public InputAction @Movement => m_Wrapper.m_PlayerMovement_Movement; + public InputAction @Camera => m_Wrapper.m_PlayerMovement_Camera; + public InputActionMap Get() { return m_Wrapper.m_PlayerMovement; } + public void Enable() { Get().Enable(); } + public void Disable() { Get().Disable(); } + public bool enabled => Get().enabled; + public static implicit operator InputActionMap(PlayerMovementActions set) { return set.Get(); } + public void SetCallbacks(IPlayerMovementActions instance) + { + if (m_Wrapper.m_PlayerMovementActionsCallbackInterface != null) + { + @Movement.started -= 
m_Wrapper.m_PlayerMovementActionsCallbackInterface.OnMovement;
+                @Movement.performed -= m_Wrapper.m_PlayerMovementActionsCallbackInterface.OnMovement;
+                @Movement.canceled -= m_Wrapper.m_PlayerMovementActionsCallbackInterface.OnMovement;
+                @Camera.started -= m_Wrapper.m_PlayerMovementActionsCallbackInterface.OnCamera;
+                @Camera.performed -= m_Wrapper.m_PlayerMovementActionsCallbackInterface.OnCamera;
+                @Camera.canceled -= m_Wrapper.m_PlayerMovementActionsCallbackInterface.OnCamera;
+            }
+            m_Wrapper.m_PlayerMovementActionsCallbackInterface = instance;
+            if (instance != null)
+            {
+                @Movement.started += instance.OnMovement;
+                @Movement.performed += instance.OnMovement;
+                @Movement.canceled += instance.OnMovement;
+                @Camera.started += instance.OnCamera;
+                @Camera.performed += instance.OnCamera;
+                @Camera.canceled += instance.OnCamera;
+            }
+        }
+    }
+    public PlayerMovementActions @PlayerMovement => new PlayerMovementActions(this);
+
+    // Player Actions
+    private readonly InputActionMap m_PlayerActions;
+    private IPlayerActionsActions m_PlayerActionsActionsCallbackInterface;
+    private readonly InputAction m_PlayerActions_Jump;
+    private readonly InputAction m_PlayerActions_Crouch;
+    private readonly InputAction m_PlayerActions_Fire;
+    public struct PlayerActionsActions
+    {
+        private @ThirdPersonViewInput m_Wrapper;
+        public PlayerActionsActions(@ThirdPersonViewInput wrapper) { m_Wrapper = wrapper; }
+        public InputAction @Jump => m_Wrapper.m_PlayerActions_Jump;
+        public InputAction @Crouch => m_Wrapper.m_PlayerActions_Crouch;
+        public InputAction @Fire => m_Wrapper.m_PlayerActions_Fire;
+        public InputActionMap Get() { return m_Wrapper.m_PlayerActions; }
+        public void Enable() { Get().Enable(); }
+        public void Disable() { Get().Disable(); }
+        public bool enabled => Get().enabled;
+        public static implicit operator InputActionMap(PlayerActionsActions set) { return set.Get(); }
+        public void SetCallbacks(IPlayerActionsActions instance)
+        {
+            if (m_Wrapper.m_PlayerActionsActionsCallbackInterface != null)
+            {
+                @Jump.started -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnJump;
+                @Jump.performed -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnJump;
+                @Jump.canceled -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnJump;
+                @Crouch.started -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnCrouch;
+                @Crouch.performed -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnCrouch;
+                @Crouch.canceled -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnCrouch;
+                @Fire.started -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnFire;
+                @Fire.performed -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnFire;
+                @Fire.canceled -= m_Wrapper.m_PlayerActionsActionsCallbackInterface.OnFire;
+            }
+            m_Wrapper.m_PlayerActionsActionsCallbackInterface = instance;
+            if (instance != null)
+            {
+                @Jump.started += instance.OnJump;
+                @Jump.performed += instance.OnJump;
+                @Jump.canceled += instance.OnJump;
+                @Crouch.started += instance.OnCrouch;
+                @Crouch.performed += instance.OnCrouch;
+                @Crouch.canceled += instance.OnCrouch;
+                @Fire.started += instance.OnFire;
+                @Fire.performed += instance.OnFire;
+                @Fire.canceled += instance.OnFire;
+            }
+        }
+    }
+    public PlayerActionsActions @PlayerActions => new PlayerActionsActions(this);
+    public interface IPlayerMovementActions
+    {
+        void OnMovement(InputAction.CallbackContext context);
+        void OnCamera(InputAction.CallbackContext context);
+    }
+    public interface IPlayerActionsActions
+    {
+        void OnJump(InputAction.CallbackContext context);
+        void OnCrouch(InputAction.CallbackContext context);
+        void OnFire(InputAction.CallbackContext context);
+    }
+}
diff --git a/Assets/Inputs/ThirdPersonViewInput.cs.meta b/Assets/Inputs/ThirdPersonViewInput.cs.meta
new file mode 100644
index 0000000..724326c
--- /dev/null
+++ b/Assets/Inputs/ThirdPersonViewInput.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: e6b0597c7a248fd4e8e5ca2ef5d7f29f
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Assets/Inputs/ThirdPersonViewInput.inputactions b/Assets/Inputs/ThirdPersonViewInput.inputactions
new file mode 100644
index 0000000..a0c0388
--- /dev/null
+++ b/Assets/Inputs/ThirdPersonViewInput.inputactions
@@ -0,0 +1,166 @@
+{
+    "name": "ThirdPersonViewInput",
+    "maps": [
+        {
+            "name": "Player Movement",
+            "id": "50a259c9-d17f-4483-bd4b-feb4e290989d",
+            "actions": [
+                {
+                    "name": "Movement",
+                    "type": "PassThrough",
+                    "id": "be8e85db-0110-45cd-823f-1058ed7eb184",
+                    "expectedControlType": "Vector2",
+                    "processors": "",
+                    "interactions": "",
+                    "initialStateCheck": false
+                },
+                {
+                    "name": "Camera",
+                    "type": "PassThrough",
+                    "id": "f7ad7ce9-d43f-468d-ab0f-54cc66ac33fa",
+                    "expectedControlType": "Vector2",
+                    "processors": "",
+                    "interactions": "",
+                    "initialStateCheck": false
+                }
+            ],
+            "bindings": [
+                {
+                    "name": "WASD",
+                    "id": "72e37b69-1a0d-4e6e-a5aa-7a2a62845d22",
+                    "path": "2DVector(mode=2)",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Movement",
+                    "isComposite": true,
+                    "isPartOfComposite": false
+                },
+                {
+                    "name": "up",
+                    "id": "a5e60c10-53e0-484a-be45-210fc2d3d305",
+                    "path": "<Keyboard>/w",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Movement",
+                    "isComposite": false,
+                    "isPartOfComposite": true
+                },
+                {
+                    "name": "down",
+                    "id": "cdafb043-b1bc-4680-9d55-91854d1e2eb4",
+                    "path": "<Keyboard>/s",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Movement",
+                    "isComposite": false,
+                    "isPartOfComposite": true
+                },
+                {
+                    "name": "left",
+                    "id": "4da8623e-6baa-4256-99ec-d016f9ad6c1f",
+                    "path": "<Keyboard>/a",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Movement",
+                    "isComposite": false,
+                    "isPartOfComposite": true
+                },
+                {
+                    "name": "right",
+                    "id": "5c98d073-e749-49de-bb73-c6f5920f53f1",
+                    "path": "<Keyboard>/d",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Movement",
+                    "isComposite": false,
+                    "isPartOfComposite": true
+                },
+                {
+                    "name": "",
+                    "id": "d8db2334-67bd-461e-92dc-dc04f2b64660",
+                    "path": "<Mouse>/delta",
+                    "interactions": "",
+                    "processors": "NormalizeVector2",
+                    "groups": "",
+                    "action": "Camera",
+                    "isComposite": false,
+                    "isPartOfComposite": false
+                }
+            ]
+        },
+        {
+            "name": "Player Actions",
+            "id": "7c28b681-44ce-4c44-832c-561051fceb49",
+            "actions": [
+                {
+                    "name": "Jump",
+                    "type": "Button",
+                    "id": "ef60df41-7d24-45f4-a426-4e6b9fe4d374",
+                    "expectedControlType": "Button",
+                    "processors": "",
+                    "interactions": "",
+                    "initialStateCheck": false
+                },
+                {
+                    "name": "Crouch",
+                    "type": "Button",
+                    "id": "19075beb-f2d7-4af5-bbcc-e2b53e1e139a",
+                    "expectedControlType": "Button",
+                    "processors": "",
+                    "interactions": "",
+                    "initialStateCheck": false
+                },
+                {
+                    "name": "Fire",
+                    "type": "PassThrough",
+                    "id": "2b493869-f7ab-4acb-8918-11b0265f0993",
+                    "expectedControlType": "Button",
+                    "processors": "",
+                    "interactions": "",
+                    "initialStateCheck": false
+                }
+            ],
+            "bindings": [
+                {
+                    "name": "",
+                    "id": "e8b4d9c1-7565-4954-b6f9-f9c7ed95f613",
+                    "path": "<Keyboard>/space",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Jump",
+                    "isComposite": false,
+                    "isPartOfComposite": false
+                },
+                {
+                    "name": "",
+                    "id": "a02f0962-5e78-44c2-aaf3-ee059376bda6",
+                    "path": "<Keyboard>/c",
+                    "interactions": "",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Crouch",
+                    "isComposite": false,
+                    "isPartOfComposite": false
+                },
+                {
+                    "name": "",
+                    "id": "5caa47c3-a8d5-4aad-86fe-40cdca87210a",
+                    "path": "<Mouse>/leftButton",
+                    "interactions": "Hold(duration=0.1)",
+                    "processors": "",
+                    "groups": "",
+                    "action": "Fire",
+                    "isComposite": false,
+                    "isPartOfComposite": false
+                }
+            ]
+        }
+    ],
+    "controlSchemes": []
+}
\ No newline at end of file
diff --git a/Assets/Inputs/ThirdPersonViewInput.inputactions.meta b/Assets/Inputs/ThirdPersonViewInput.inputactions.meta
new file mode 100644
index 0000000..2e8e4a7
--- /dev/null
+++ b/Assets/Inputs/ThirdPersonViewInput.inputactions.meta
@@ -0,0 +1,14 @@
+fileFormatVersion: 2
+guid: a17c8887bfca8834d8287fab2f0cadf8
+ScriptedImporter:
+  internalIDToNameTable: []
+  externalObjects: {}
+  serializedVersion: 2
+  userData:
+  assetBundleName:
+  assetBundleVariant:
+  script: {fileID: 11500000, guid: 8404be70184654265930450def6a9037, type: 3}
+  generateWrapperCode: 1
+  wrapperCodePath:
+  wrapperClassName:
+  wrapperCodeNamespace:
diff --git a/Assets/ML-Agents/Timers/DemoScene01_timers.json b/Assets/ML-Agents/Timers/DemoScene01_timers.json
deleted file mode 100755
index 3914503..0000000
--- a/Assets/ML-Agents/Timers/DemoScene01_timers.json
+++ /dev/null
@@ -1 +0,0 @@
-{"count":1,"self":249.99626239999998,"total":250.70272609999998,"children":{"InitializeActuators":{"count":1,"self":0.0005131,"total":0.0005131,"children":null},"AgentSendState":{"count":8389,"self":0.1187579,"total":0.1187579,"children":null},"DecideAction":{"count":8389,"self":0.5871921,"total":0.5871921,"children":null}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649259643","unity_version":"2019.4.36f1","command_line_arguments":"C:\\Program Files\\Unity\\Hub\\Editor\\2019.4.36f1\\Editor\\Unity.exe -projectpath D:\\real_shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-user -hubSessionId 91406950-b5ad-11ec-a63c-e7b76cbae13d -accessToken EBt97pMhHqClFDnjD_uh-3vplxP-uI2yS0WK-hSxfuM012f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"DemoScene01","end_time_seconds":"1649259893"}}
\ No newline at end of file
diff --git a/Assets/ML-Agents/Timers/dont touch me plz_timers.json b/Assets/ML-Agents/Timers/dont touch me plz_timers.json
deleted file mode 100755
index 699e26c..0000000
--- a/Assets/ML-Agents/Timers/dont touch me plz_timers.json
+++ /dev/null
@@ -1 +0,0 @@
-{"count":1,"self":14.5494256,"total":14.5766034,"children":{"InitializeActuators":{"count":1,"self":0,"total":0,"children":null},"AgentSendState":{"count":497,"self":0.0020012999999999997,"total":0.0020012999999999997,"children":null},"DecideAction":{"count":497,"self":0.0241768,"total":0.0241768,"children":null}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649741696","unity_version":"2019.4.35f1","command_line_arguments":"D:\\Unity\\2019.4.35f1\\Editor\\Unity.exe -projectpath C:\\Users\\kiril\\real-shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-kiril -hubSessionId 21e9de90-ba14-11ec-9398-079edccf5e34 -accessToken 
oIpf_7gKWQQRilVTeJehUsFhrxasdzsG_K3j5Swtgx0009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"dont touch me plz","end_time_seconds":"1649741711"}} \ No newline at end of file diff --git a/Assets/Prefabs/CharacterPrefabs.meta b/Assets/Prefabs/CharacterPrefabs.meta new file mode 100644 index 0000000..2ca1002 --- /dev/null +++ b/Assets/Prefabs/CharacterPrefabs.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: a7a6fd6139b92ce43af2a2f1116ea410 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Prefabs/Bot.prefab b/Assets/Prefabs/CharacterPrefabs/Bot.prefab similarity index 100% rename from Assets/Prefabs/Bot.prefab rename to Assets/Prefabs/CharacterPrefabs/Bot.prefab diff --git a/Assets/Prefabs/Bot.prefab.meta b/Assets/Prefabs/CharacterPrefabs/Bot.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/Bot.prefab.meta rename to Assets/Prefabs/CharacterPrefabs/Bot.prefab.meta diff --git a/Assets/Prefabs/CharacterPrefabs/Character.prefab b/Assets/Prefabs/CharacterPrefabs/Character.prefab new file mode 100644 index 0000000..0699e0c --- /dev/null +++ b/Assets/Prefabs/CharacterPrefabs/Character.prefab @@ -0,0 +1,2639 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!1 &6633355367928763904 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859616} + m_Layer: 0 + m_Name: Thumb_Intermediate_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859616 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763904} + m_LocalRotation: {x: -0.04900198, y: -0.13549508, z: 0.10049181, w: 0.98444974} + m_LocalPosition: {x: -9.835772, y: -1.1368684e-13, z: -2.842171e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859620} + m_Father: {fileID: 6633355367928859628} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763906 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859618} + m_Layer: 0 + m_Name: Thumb_Distal_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859618 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763906} + m_LocalRotation: {x: 0.0017341404, y: 0.008320127, z: -0.005799853, w: 0.9999471} + m_LocalPosition: {x: -8.816269, y: 0, z: -1.4210855e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859630} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763908 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859620} + m_Layer: 0 + m_Name: Thumb_Distal_Left + m_TagString: Untagged + m_Icon: 
{fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859620 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763908} + m_LocalRotation: {x: -0.017883137, y: -0.03974522, z: -0.025908269, w: 0.99871385} + m_LocalPosition: {x: -8.816608, y: 0, z: 1.4210855e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859616} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763910 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859622} + m_Layer: 0 + m_Name: Spine + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859622 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763910} + m_LocalRotation: {x: 0.99452215, y: -0.104526356, z: -7.7878193e-10, w: 0.0000000074097675} + m_LocalPosition: {x: -21.090727, y: -8.881784e-15, z: -4.4013775e-16} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859538} + m_Father: {fileID: 6633355367928859552} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763912 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859624} + m_Layer: 0 + m_Name: Toe_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859624 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763912} + m_LocalRotation: {x: 0.0013659683, y: 0.0051824837, z: -0.24785845, w: 0.9687815} + m_LocalPosition: {x: -13.771131, y: 1.9539925e-14, z: -1.0658141e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859636} + m_Father: {fileID: 6633355367928859536} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763914 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859626} + m_Layer: 0 + m_Name: Thumb_Proximal_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859626 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763914} + m_LocalRotation: {x: -0.057529792, y: -0.49499094, z: 0.11207554, w: 0.859717} + m_LocalPosition: {x: -0.29218963, y: -0.56312394, z: -12.29586} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859630} + m_Father: {fileID: 6633355367928859546} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763916 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: 
{fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859628} + m_Layer: 0 + m_Name: Thumb_Proximal_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859628 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763916} + m_LocalRotation: {x: 0.057522308, y: 0.49499637, z: 0.112048574, w: 0.8597179} + m_LocalPosition: {x: -0.2922163, y: -0.5636321, z: 12.295864} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859616} + m_Father: {fileID: 6633355367928859548} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763918 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859630} + m_Layer: 0 + m_Name: Thumb_Intermediate_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859630 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763918} + m_LocalRotation: {x: 0.048999686, y: 0.1354733, z: 0.1004816, w: 0.9844539} + m_LocalPosition: {x: -9.836, y: -5.684342e-14, z: -5.684342e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859618} + m_Father: {fileID: 6633355367928859626} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763920 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859632} + m_Layer: 0 + m_Name: Trigger_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859632 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763920} + m_LocalRotation: {x: -0.0031178175, y: 0.0071248533, z: 0.40088162, w: 0.91609687} + m_LocalPosition: {x: -9.913989, y: -10.731702, z: 9.284221} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859532} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763922 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859634} + m_Layer: 0 + m_Name: Toetip_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859634 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763922} + m_LocalRotation: {x: -0.00028878966, y: 0.016934738, z: -0.012053749, w: 0.99978393} + m_LocalPosition: {x: -17.883856, y: -2.6645353e-15, z: -1.4210855e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + 
m_Father: {fileID: 6633355367928859638} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763924 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859636} + m_Layer: 0 + m_Name: Toetip_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859636 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763924} + m_LocalRotation: {x: 0.00016529544, y: -0.016936114, z: -0.0047631934, w: 0.99984527} + m_LocalPosition: {x: -17.883871, y: 3.5527137e-15, z: 2.1316282e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859624} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763926 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859638} + m_Layer: 0 + m_Name: Toe_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859638 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763926} + m_LocalRotation: {x: -0.0013655907, y: -0.005181019, z: -0.24785995, w: 0.9687811} + m_LocalPosition: {x: -13.771146, y: 2.1316282e-14, z: 1.0658141e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859634} + m_Father: {fileID: 6633355367928859550} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763928 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859640} + m_Layer: 0 + m_Name: UpperLeg_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859640 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763928} + m_LocalRotation: {x: 0.03061261, y: 0.6954094, z: 0.7139454, w: 0.07583304} + m_LocalPosition: {x: 0.084786385, y: 0.40212917, z: 18.8642} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859578} + m_Father: {fileID: 6633355367928859552} + m_RootOrder: 4 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763930 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859642} + m_Layer: 0 + m_Name: UpperLeg_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859642 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763930} + 
m_LocalRotation: {x: -0.030611672, y: -0.6954005, z: 0.7139541, w: 0.07583354} + m_LocalPosition: {x: 0.08523814, y: 0.40205857, z: -18.864191} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859580} + m_Father: {fileID: 6633355367928859552} + m_RootOrder: 3 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763932 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859644} + m_Layer: 0 + m_Name: UpperArm_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859644 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763932} + m_LocalRotation: {x: 0.029697837, y: 0.15896995, z: -0.18122736, w: 0.97005326} + m_LocalPosition: {x: -15.111769, y: 0, z: -3.1974423e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859582} + m_Father: {fileID: 6633355367928859604} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928763934 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859646} + m_Layer: 0 + m_Name: UpperArm_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859646 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928763934} + m_LocalRotation: {x: -0.029700447, y: -0.15896967, z: -0.18123563, w: 0.97005165} + m_LocalPosition: {x: -15.111847, y: 0, z: -3.7303494e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859568} + m_Father: {fileID: 6633355367928859606} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764320 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859520} + m_Layer: 0 + m_Name: ArmPlacement_Upper + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859520 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764320} + m_LocalRotation: {x: -0.002378591, y: 0.08787313, z: 0.02695381, w: 0.99576414} + m_LocalPosition: {x: -0.56803536, y: 46.34834, z: 10.906936} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859528} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764322 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859522} + m_Layer: 0 + m_Name: ArmPlacement_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 
&6633355367928859522 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764322} + m_LocalRotation: {x: -0.02695381, y: 0.99576414, z: -0.0023785909, w: -0.08787313} + m_LocalPosition: {x: 41.471436, y: 4.6548405, z: -15.306103} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859528} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764324 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859524} + m_Layer: 0 + m_Name: ArmPlacement_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859524 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764324} + m_LocalRotation: {x: -0.002378591, y: 0.08787313, z: 0.02695381, w: 0.99576414} + m_LocalPosition: {x: -44.450283, y: 2.842171e-14, z: -2.1316282e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859528} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764326 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859526} + - component: {fileID: 6633355367931943590} + m_Layer: 0 + m_Name: Arm1 + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859526 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764326} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0, y: -163.22968, z: -0.3527179} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: [] + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!137 &6633355367931943590 +SkinnedMeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764326} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 0 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 78660c09ff101ea4cb77de408a9527da, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 + serializedVersion: 2 + m_Quality: 0 + m_UpdateWhenOffscreen: 0 + m_SkinnedMotionVectors: 1 + m_Mesh: {fileID: 4300006, guid: 
860793eb3324391468f1c120a75ec049, type: 3} + m_Bones: + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859538} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859544} + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859646} + - {fileID: 6633355367928859568} + - {fileID: 6633355367928859548} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859598} + - {fileID: 6633355367928859586} + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859562} + - {fileID: 6633355367928859566} + - {fileID: 6633355367928859628} + - {fileID: 6633355367928859616} + - {fileID: 6633355367928859620} + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859614} + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859604} + - {fileID: 6633355367928859644} + - {fileID: 6633355367928859582} + - {fileID: 6633355367928859546} + - {fileID: 6633355367928859592} + - {fileID: 6633355367928859596} + - {fileID: 6633355367928859584} + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859560} + - {fileID: 6633355367928859564} + - {fileID: 6633355367928859626} + - {fileID: 6633355367928859630} + - {fileID: 6633355367928859618} + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859612} + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859520} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859580} + - {fileID: 6633355367928859536} + - {fileID: 6633355367928859624} + - {fileID: 6633355367928859636} + - {fileID: 6633355367928859640} + - {fileID: 6633355367928859578} + - {fileID: 6633355367928859550} + - {fileID: 6633355367928859638} + - {fileID: 6633355367928859634} + - {fileID: 6633355367928859532} + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + - {fileID: 6633355367928859534} + m_BlendShapeWeights: [] + m_RootBone: {fileID: 6633355367928859552} + m_AABB: + m_Center: {x: -58.594677, y: 13.632675, z: 0.00010681152} + m_Extent: {x: 29.436275, y: 28.906914, z: 140.37485} + m_DirtyAABB: 0 +--- !u!1 &6633355367928764328 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859528} + m_Layer: 0 + m_Name: BackPack + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859528 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764328} + m_LocalRotation: {x: -0.3617453, y: 0.5306367, z: 0.3968756, w: 0.6557856} + m_LocalPosition: {x: -10.219411, y: -38.523506, z: 0.00000010329652} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859520} + m_Father: {fileID: 6633355367928859538} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764330 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859530} + - component: {fileID: 6633355367931943588} + m_Layer: 0 + m_Name: AssaultRifle + 
m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859530 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764330} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -118.131454, y: -98.01, z: -57.969975} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: [] + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!137 &6633355367931943588 +SkinnedMeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764330} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 0 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 78660c09ff101ea4cb77de408a9527da, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 + serializedVersion: 2 + m_Quality: 0 + m_UpdateWhenOffscreen: 0 + m_SkinnedMotionVectors: 1 + m_Mesh: {fileID: 4300010, guid: 860793eb3324391468f1c120a75ec049, type: 3} + m_Bones: + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859538} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859544} + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859646} + - {fileID: 6633355367928859568} + - {fileID: 6633355367928859548} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859598} + - {fileID: 6633355367928859586} + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859562} + - {fileID: 6633355367928859566} + - {fileID: 6633355367928859628} + - {fileID: 6633355367928859616} + - {fileID: 6633355367928859620} + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859614} + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859604} + - {fileID: 6633355367928859644} + - {fileID: 6633355367928859582} + - {fileID: 6633355367928859546} + - {fileID: 6633355367928859592} + - {fileID: 6633355367928859596} + - {fileID: 6633355367928859584} + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859560} + - {fileID: 6633355367928859564} + - {fileID: 6633355367928859626} + - {fileID: 6633355367928859630} + - {fileID: 6633355367928859618} + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859612} + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859520} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859580} + - {fileID: 6633355367928859536} + - {fileID: 6633355367928859624} + - {fileID: 6633355367928859636} + - {fileID: 6633355367928859640} + - {fileID: 6633355367928859578} + - {fileID: 6633355367928859550} 
+ - {fileID: 6633355367928859638} + - {fileID: 6633355367928859634} + - {fileID: 6633355367928859532} + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + - {fileID: 6633355367928859534} + m_BlendShapeWeights: [] + m_RootBone: {fileID: 6633355367928859552} + m_AABB: + m_Center: {x: -34.27955, y: -49.079704, z: 120.48372} + m_Extent: {x: 95.48148, y: 94.6697, z: 10.629513} + m_DirtyAABB: 0 +--- !u!1 &6633355367928764332 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859532} + m_Layer: 0 + m_Name: ArmPosition_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859532 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764332} + m_LocalRotation: {x: 0.0025357183, y: -0.0073521743, z: -0.47313836, w: 0.88095385} + m_LocalPosition: {x: -2.423428, y: -55.614994, z: 120.53} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + m_Father: {fileID: 6633355367928859552} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764334 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859534} + m_Layer: 0 + m_Name: ArmPosition_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859534 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764334} + m_LocalRotation: {x: 0.0073521743, y: 0.002535718, z: 0.88095385, w: 0.47313833} + m_LocalPosition: {x: -2.423428, y: -55.614994, z: -120.53} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859552} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764336 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859536} + m_Layer: 0 + m_Name: Foot_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859536 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764336} + m_LocalRotation: {x: 0.8193084, y: 0.56578285, z: -0.02233458, w: 0.09013736} + m_LocalPosition: {x: -62.734695, y: 2.842171e-14, z: -1.7763568e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859624} + m_Father: {fileID: 6633355367928859580} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764338 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859538} + m_Layer: 0 + m_Name: Chest + 
m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859538 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764338} + m_LocalRotation: {x: 3.1378165e-33, y: -5.551116e-17, z: 0.18862787, w: 0.9820487} + m_LocalPosition: {x: -24.042006, y: -1.0658141e-14, z: 1.9451509e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859604} + m_Father: {fileID: 6633355367928859622} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764340 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859540} + - component: {fileID: 6633355367931943584} + m_Layer: 0 + m_Name: Body1 + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859540 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764340} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0.0000018693923, y: -129.49721, z: -6.0212374} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: [] + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 3 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!137 &6633355367931943584 +SkinnedMeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764340} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 0 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 78660c09ff101ea4cb77de408a9527da, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 + serializedVersion: 2 + m_Quality: 0 + m_UpdateWhenOffscreen: 0 + m_SkinnedMotionVectors: 1 + m_Mesh: {fileID: 4300002, guid: 860793eb3324391468f1c120a75ec049, type: 3} + m_Bones: + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859538} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859544} + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859646} + - {fileID: 6633355367928859568} + - {fileID: 6633355367928859548} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859598} + - {fileID: 6633355367928859586} + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859562} + - {fileID: 6633355367928859566} + - {fileID: 6633355367928859628} + - {fileID: 
6633355367928859616} + - {fileID: 6633355367928859620} + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859614} + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859604} + - {fileID: 6633355367928859644} + - {fileID: 6633355367928859582} + - {fileID: 6633355367928859546} + - {fileID: 6633355367928859592} + - {fileID: 6633355367928859596} + - {fileID: 6633355367928859584} + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859560} + - {fileID: 6633355367928859564} + - {fileID: 6633355367928859626} + - {fileID: 6633355367928859630} + - {fileID: 6633355367928859618} + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859612} + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859520} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859580} + - {fileID: 6633355367928859536} + - {fileID: 6633355367928859624} + - {fileID: 6633355367928859636} + - {fileID: 6633355367928859640} + - {fileID: 6633355367928859578} + - {fileID: 6633355367928859550} + - {fileID: 6633355367928859638} + - {fileID: 6633355367928859634} + - {fileID: 6633355367928859532} + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + - {fileID: 6633355367928859534} + m_BlendShapeWeights: [] + m_RootBone: {fileID: 6633355367928859552} + m_AABB: + m_Center: {x: -31.982391, y: 2.5145874, z: 0.0000019073486} + m_Extent: {x: 51.12609, y: 46.604404, z: 27.749142} + m_DirtyAABB: 0 +--- !u!1 &6633355367928764342 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859542} + - component: {fileID: 6633355367931943586} + m_Layer: 0 + m_Name: Backpack1 + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859542 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764342} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0, y: -138.49533, z: 34.48629} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: [] + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!137 &6633355367931943586 +SkinnedMeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764342} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 0 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 78660c09ff101ea4cb77de408a9527da, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 + serializedVersion: 2 + m_Quality: 0 + 
m_UpdateWhenOffscreen: 0 + m_SkinnedMotionVectors: 1 + m_Mesh: {fileID: 4300004, guid: 860793eb3324391468f1c120a75ec049, type: 3} + m_Bones: + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859538} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859544} + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859646} + - {fileID: 6633355367928859568} + - {fileID: 6633355367928859548} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859598} + - {fileID: 6633355367928859586} + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859562} + - {fileID: 6633355367928859566} + - {fileID: 6633355367928859628} + - {fileID: 6633355367928859616} + - {fileID: 6633355367928859620} + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859614} + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859604} + - {fileID: 6633355367928859644} + - {fileID: 6633355367928859582} + - {fileID: 6633355367928859546} + - {fileID: 6633355367928859592} + - {fileID: 6633355367928859596} + - {fileID: 6633355367928859584} + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859560} + - {fileID: 6633355367928859564} + - {fileID: 6633355367928859626} + - {fileID: 6633355367928859630} + - {fileID: 6633355367928859618} + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859612} + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859520} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859580} + - {fileID: 6633355367928859536} + - {fileID: 6633355367928859624} + - {fileID: 6633355367928859636} + - {fileID: 6633355367928859640} + - {fileID: 6633355367928859578} + - {fileID: 6633355367928859550} + - {fileID: 6633355367928859638} + - {fileID: 6633355367928859634} + - {fileID: 6633355367928859532} + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + - {fileID: 6633355367928859534} + m_BlendShapeWeights: [] + m_RootBone: {fileID: 6633355367928859552} + m_AABB: + m_Center: {x: -28.89946, y: 44.769905, z: -1.5718498} + m_Extent: {x: 47.82257, y: 32.29384, z: 42.822285} + m_DirtyAABB: 0 +--- !u!1 &6633355367928764344 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859544} + m_Layer: 0 + m_Name: Head + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859544 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764344} + m_LocalRotation: {x: 0.071218304, y: -0.028954746, z: -0.059960153, w: 0.99523586} + m_LocalPosition: {x: -13.255574, y: -1.4210855e-14, z: 7.364747e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + m_Father: {fileID: 6633355367928859590} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764346 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859546} + m_Layer: 0 + m_Name: Hand_Right 
+ m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859546 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764346} + m_LocalRotation: {x: -0.0005926622, y: 0.085287675, z: -0.0069382307, w: 0.99633205} + m_LocalPosition: {x: -37.441414, y: 1.7053026e-13, z: -5.3290705e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859592} + - {fileID: 6633355367928859626} + m_Father: {fileID: 6633355367928859582} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764348 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859548} + m_Layer: 0 + m_Name: Hand_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859548 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764348} + m_LocalRotation: {x: 0.0005932963, y: -0.08528753, z: -0.006930911, w: 0.9963321} + m_LocalPosition: {x: -37.441498, y: 8.526513e-14, z: 2.7533531e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859628} + m_Father: {fileID: 6633355367928859568} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764350 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859550} + m_Layer: 0 + m_Name: Foot_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859550 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764350} + m_LocalRotation: {x: 0.81892335, y: 0.5656828, z: 0.024710972, w: -0.09357782} + m_LocalPosition: {x: -62.73464, y: -9.769963e-15, z: 2.4868996e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859638} + m_Father: {fileID: 6633355367928859578} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764352 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859552} + m_Layer: 0 + m_Name: Hips + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859552 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764352} + m_LocalRotation: {x: -0.4582423, y: 0.5385295, z: -0.4582423, w: 0.5385295} + m_LocalPosition: {x: -0, y: 1.0650933, z: 0.038611155} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: + - {fileID: 6633355367928859534} + - {fileID: 
6633355367928859532} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859640} + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 5 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764354 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859554} + m_Layer: 0 + m_Name: Headgear_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859554 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764354} + m_LocalRotation: {x: -0.5234401, y: 0.5463846, z: 0.4754055, w: 0.44884726} + m_LocalPosition: {x: 1.3088212, y: 3.871166, z: 12.853171} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859544} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764356 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859556} + m_Layer: 0 + m_Name: Headgear_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859556 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764356} + m_LocalRotation: {x: 0.47540557, y: -0.44884732, z: 0.5234402, w: 0.54638463} + m_LocalPosition: {x: -8.526513e-14, y: -6.550316e-15, z: -13.487081} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859544} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764358 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859558} + - component: {fileID: 6633355367931943598} + m_Layer: 0 + m_Name: head1 + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859558 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764358} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0, y: -176.9416, z: -6.1230974} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: [] + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 4 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!137 &6633355367931943598 +SkinnedMeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764358} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 0 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 78660c09ff101ea4cb77de408a9527da, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + 
subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 + serializedVersion: 2 + m_Quality: 0 + m_UpdateWhenOffscreen: 0 + m_SkinnedMotionVectors: 1 + m_Mesh: {fileID: 4300000, guid: 860793eb3324391468f1c120a75ec049, type: 3} + m_Bones: + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859538} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859544} + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859646} + - {fileID: 6633355367928859568} + - {fileID: 6633355367928859548} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859598} + - {fileID: 6633355367928859586} + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859562} + - {fileID: 6633355367928859566} + - {fileID: 6633355367928859628} + - {fileID: 6633355367928859616} + - {fileID: 6633355367928859620} + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859614} + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859604} + - {fileID: 6633355367928859644} + - {fileID: 6633355367928859582} + - {fileID: 6633355367928859546} + - {fileID: 6633355367928859592} + - {fileID: 6633355367928859596} + - {fileID: 6633355367928859584} + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859560} + - {fileID: 6633355367928859564} + - {fileID: 6633355367928859626} + - {fileID: 6633355367928859630} + - {fileID: 6633355367928859618} + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859612} + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859520} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859580} + - {fileID: 6633355367928859536} + - {fileID: 6633355367928859624} + - {fileID: 6633355367928859636} + - {fileID: 6633355367928859640} + - {fileID: 6633355367928859578} + - {fileID: 6633355367928859550} + - {fileID: 6633355367928859638} + - {fileID: 6633355367928859634} + - {fileID: 6633355367928859532} + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + - {fileID: 6633355367928859534} + m_BlendShapeWeights: [] + m_RootBone: {fileID: 6633355367928859552} + m_AABB: + m_Center: {x: -71.52329, y: 10.616158, z: -1.0935726} + m_Extent: {x: 26.513672, y: 26.529358, z: 17.550735} + m_DirtyAABB: 0 +--- !u!1 &6633355367928764360 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859560} + m_Layer: 0 + m_Name: Index_Intermediate_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859560 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764360} + m_LocalRotation: {x: -0.017963478, y: -0.041126773, z: 0.25885317, w: 0.96487355} + m_LocalPosition: {x: -10.023805, y: 
-1.4210855e-13, z: -1.7763568e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859564} + m_Father: {fileID: 6633355367928859572} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764362 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859562} + m_Layer: 0 + m_Name: Index_Intermediate_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859562 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764362} + m_LocalRotation: {x: 0.017964862, y: 0.041127786, z: 0.25889415, w: 0.96486247} + m_LocalPosition: {x: -10.023557, y: -1.4210855e-13, z: 8.881784e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859566} + m_Father: {fileID: 6633355367928859574} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764364 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859564} + m_Layer: 0 + m_Name: Index_Distal_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859564 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764364} + m_LocalRotation: {x: 0.012835801, y: 0.013473716, z: 0.08711711, w: 0.99602425} + m_LocalPosition: {x: -7.9785438, y: 1.7053026e-13, z: 4.440892e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859560} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764366 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859566} + m_Layer: 0 + m_Name: Index_Distal_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859566 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764366} + m_LocalRotation: {x: -0.013842603, y: -0.01243883, z: 0.16401216, w: 0.98628277} + m_LocalPosition: {x: -7.9783425, y: 5.684342e-14, z: -1.7763568e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859562} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764368 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859568} + m_Layer: 0 + m_Name: LowerArm_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859568 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + 
m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764368} + m_LocalRotation: {x: -1.11818e-29, y: 0.049585894, z: 1.0904029e-28, w: 0.9987699} + m_LocalPosition: {x: -33.66694, y: 2.842171e-14, z: 4.2188475e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859548} + m_Father: {fileID: 6633355367928859646} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764370 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859570} + - component: {fileID: 6633355367931943596} + m_Layer: 0 + m_Name: Leg1 + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859570 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764370} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0, y: -96.69518, z: -8.197696} + m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_Children: [] + m_Father: {fileID: 6633355367928859588} + m_RootOrder: 6 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!137 &6633355367931943596 +SkinnedMeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764370} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 0 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 78660c09ff101ea4cb77de408a9527da, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 + serializedVersion: 2 + m_Quality: 0 + m_UpdateWhenOffscreen: 0 + m_SkinnedMotionVectors: 1 + m_Mesh: {fileID: 4300008, guid: 860793eb3324391468f1c120a75ec049, type: 3} + m_Bones: + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859622} + - {fileID: 6633355367928859538} + - {fileID: 6633355367928859590} + - {fileID: 6633355367928859544} + - {fileID: 6633355367928859556} + - {fileID: 6633355367928859554} + - {fileID: 6633355367928859606} + - {fileID: 6633355367928859646} + - {fileID: 6633355367928859568} + - {fileID: 6633355367928859548} + - {fileID: 6633355367928859594} + - {fileID: 6633355367928859598} + - {fileID: 6633355367928859586} + - {fileID: 6633355367928859574} + - {fileID: 6633355367928859562} + - {fileID: 6633355367928859566} + - {fileID: 6633355367928859628} + - {fileID: 6633355367928859616} + - {fileID: 6633355367928859620} + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859614} + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859604} + - {fileID: 6633355367928859644} + - {fileID: 6633355367928859582} + - {fileID: 6633355367928859546} + - {fileID: 6633355367928859592} + - {fileID: 
6633355367928859596} + - {fileID: 6633355367928859584} + - {fileID: 6633355367928859572} + - {fileID: 6633355367928859560} + - {fileID: 6633355367928859564} + - {fileID: 6633355367928859626} + - {fileID: 6633355367928859630} + - {fileID: 6633355367928859618} + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859612} + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859528} + - {fileID: 6633355367928859524} + - {fileID: 6633355367928859520} + - {fileID: 6633355367928859522} + - {fileID: 6633355367928859642} + - {fileID: 6633355367928859580} + - {fileID: 6633355367928859536} + - {fileID: 6633355367928859624} + - {fileID: 6633355367928859636} + - {fileID: 6633355367928859640} + - {fileID: 6633355367928859578} + - {fileID: 6633355367928859550} + - {fileID: 6633355367928859638} + - {fileID: 6633355367928859634} + - {fileID: 6633355367928859532} + - {fileID: 6633355367928859576} + - {fileID: 6633355367928859632} + - {fileID: 6633355367928859534} + m_BlendShapeWeights: [] + m_RootBone: {fileID: 6633355367928859552} + m_AABB: + m_Center: {x: 53.24073, y: -15.325765, z: 0.17397118} + m_Extent: {x: 69.29029, y: 36.12146, z: 51.77101} + m_DirtyAABB: 0 +--- !u!1 &6633355367928764372 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859572} + m_Layer: 0 + m_Name: Index_Proximal_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859572 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764372} + m_LocalRotation: {x: -0.0028644414, y: -0.042128764, z: 0.08165133, w: 0.99576604} + m_LocalPosition: {x: -13.357139, y: -0.09103328, z: -13.440055} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859560} + m_Father: {fileID: 6633355367928859546} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764374 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859574} + m_Layer: 0 + m_Name: Index_Proximal_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859574 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764374} + m_LocalRotation: {x: 0.0028642316, y: 0.04212831, z: 0.08163239, w: 0.9957676} + m_LocalPosition: {x: -13.357445, y: -0.09144714, z: 13.440094} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859562} + m_Father: {fileID: 6633355367928859548} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764376 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859576} + m_Layer: 0 + m_Name: magazine_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859576 +Transform: + m_ObjectHideFlags: 0 
+ m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764376} + m_LocalRotation: {x: -0.4881349, y: 0.5173824, z: 0.48743477, w: 0.5064061} + m_LocalPosition: {x: -0.004472253, y: -40.147858, z: 0.03500123} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859532} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764378 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859578} + m_Layer: 0 + m_Name: LowerLeg_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859578 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764378} + m_LocalRotation: {x: 0.66088396, y: 0.05163587, z: -0.04218814, w: 0.74752015} + m_LocalPosition: {x: -33.437313, y: 4.973799e-14, z: -8.881784e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859550} + m_Father: {fileID: 6633355367928859640} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764380 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859580} + m_Layer: 0 + m_Name: LowerLeg_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859580 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764380} + m_LocalRotation: {x: -0.6640286, y: -0.051458772, z: -0.042404156, w: 0.74472815} + m_LocalPosition: {x: -33.436848, y: -2.4868996e-14, z: 6.217249e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859536} + m_Father: {fileID: 6633355367928859642} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764382 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859582} + m_Layer: 0 + m_Name: LowerArm_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859582 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764382} + m_LocalRotation: {x: 4.9720758e-29, y: -0.049585804, z: 1.0024346e-27, w: 0.9987699} + m_LocalPosition: {x: -33.666927, y: 0, z: -1.4432899e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859546} + m_Father: {fileID: 6633355367928859644} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764384 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 
6633355367928859584} + m_Layer: 0 + m_Name: RestOfFingers_Distal_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859584 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764384} + m_LocalRotation: {x: 0.0041774614, y: 0.0038508072, z: 0.015434083, w: 0.99986476} + m_LocalPosition: {x: -8.174185, y: 2.842171e-14, z: 2.6645353e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859596} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764386 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859586} + m_Layer: 0 + m_Name: RestOfFingers_Distal_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859586 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764386} + m_LocalRotation: {x: -0.004532425, y: -0.003425033, z: 0.11285209, w: 0.9935956} + m_LocalPosition: {x: -8.173605, y: -8.526513e-14, z: -3.5527137e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859598} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764388 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859588} + m_Layer: 0 + m_Name: Character + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859588 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764388} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 44.336212, y: 0.6996672, z: -10.997339} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859526} + - {fileID: 6633355367928859530} + - {fileID: 6633355367928859542} + - {fileID: 6633355367928859540} + - {fileID: 6633355367928859558} + - {fileID: 6633355367928859552} + - {fileID: 6633355367928859570} + m_Father: {fileID: 0} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764390 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859590} + m_Layer: 0 + m_Name: Neck + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859590 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764390} + m_LocalRotation: {x: 0.9631089, y: -0.26911193, z: 1.6478353e-17, w: -5.897341e-17} + m_LocalPosition: {x: -17.7523, y: -2.842171e-14, z: 0.000000026610966} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - 
{fileID: 6633355367928859544} + m_Father: {fileID: 6633355367928859538} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764392 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859592} + m_Layer: 0 + m_Name: RestOfFingers_Proximal_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859592 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764392} + m_LocalRotation: {x: -0.0028322286, y: -0.060177784, z: 0.060808677, w: 0.9963297} + m_LocalPosition: {x: -18.094389, y: 8.526513e-14, z: -1.0658141e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859596} + m_Father: {fileID: 6633355367928859546} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764394 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859594} + m_Layer: 0 + m_Name: RestOfFingers_Proximal_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859594 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764394} + m_LocalRotation: {x: 0.0028340812, y: 0.060178064, z: 0.060824323, w: 0.9963288} + m_LocalPosition: {x: -18.094444, y: 5.684342e-14, z: 1.7763568e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859598} + m_Father: {fileID: 6633355367928859548} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764396 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859596} + m_Layer: 0 + m_Name: RestOfFingers_Intermediate_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859596 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764396} + m_LocalRotation: {x: -0.0061721927, y: -0.012479491, z: 0.3441051, w: 0.93882793} + m_LocalPosition: {x: -8.025833, y: -1.7053026e-13, z: 4.8849813e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859584} + m_Father: {fileID: 6633355367928859592} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764398 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859598} + m_Layer: 0 + m_Name: RestOfFingers_Intermediate_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859598 +Transform: + m_ObjectHideFlags: 0 + 
m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764398} + m_LocalRotation: {x: 0.0061717043, y: 0.012478555, z: 0.3440602, w: 0.9388444} + m_LocalPosition: {x: -8.02586, y: -1.1368684e-13, z: -5.3290705e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859586} + m_Father: {fileID: 6633355367928859594} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764400 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859600} + m_Layer: 0 + m_Name: ShoulderPadBlade_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859600 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764400} + m_LocalRotation: {x: -0.0066352007, y: 0.011333432, z: 0.50519216, w: 0.86290693} + m_LocalPosition: {x: -13.296815, y: -22.177006, z: 0.5073527} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859608} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764402 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859602} + m_Layer: 0 + m_Name: ShoulderPadBlade_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859602 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764402} + m_LocalRotation: {x: 0.0066350633, y: -0.011333145, z: 0.5051939, w: 0.862906} + m_LocalPosition: {x: -13.296959, y: -22.176832, z: -0.5073402} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859610} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764404 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859604} + m_Layer: 0 + m_Name: Shoulder_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859604 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764404} + m_LocalRotation: {x: -0.29884863, y: 0.73412436, z: -0.41826612, w: -0.4436265} + m_LocalPosition: {x: -12.348376, y: -6.29761, z: -18.7061} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859608} + - {fileID: 6633355367928859644} + m_Father: {fileID: 6633355367928859538} + m_RootOrder: 3 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764406 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 
6633355367928859606} + m_Layer: 0 + m_Name: Shoulder_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859606 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764406} + m_LocalRotation: {x: -0.2988425, y: 0.7341269, z: 0.41826975, w: 0.4436229} + m_LocalPosition: {x: -12.348807, y: -6.29741, z: 18.706108} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859610} + - {fileID: 6633355367928859646} + m_Father: {fileID: 6633355367928859538} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764408 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859608} + m_Layer: 0 + m_Name: ShoulderPadCTRL_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859608 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764408} + m_LocalRotation: {x: -0.039328646, y: 0.10945006, z: -0.6506794, w: 0.7503934} + m_LocalPosition: {x: -9.938715, y: 14.657999, z: 1.039447} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859600} + - {fileID: 6633355367928859612} + m_Father: {fileID: 6633355367928859604} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764410 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859610} + m_Layer: 0 + m_Name: ShoulderPadCTRL_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859610 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764410} + m_LocalRotation: {x: 0.039328095, y: -0.109450735, z: -0.65068716, w: 0.75038666} + m_LocalPosition: {x: -9.938806, y: 14.657373, z: -1.0394562} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6633355367928859602} + - {fileID: 6633355367928859614} + m_Father: {fileID: 6633355367928859606} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764412 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859612} + m_Layer: 0 + m_Name: ShoulderPadBody_Right + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859612 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764412} + m_LocalRotation: {x: -0.0066352007, y: 0.011333432, z: 0.50519216, w: 0.86290693} + m_LocalPosition: {x: -16.02375, y: -6.3948846e-14, z: 1.2878587e-14} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + 
m_Father: {fileID: 6633355367928859608} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &6633355367928764414 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6633355367928859614} + m_Layer: 0 + m_Name: ShoulderPadBody_Left + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6633355367928859614 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6633355367928764414} + m_LocalRotation: {x: 0.0066350633, y: -0.011333145, z: 0.5051939, w: 0.862906} + m_LocalPosition: {x: -16.023825, y: -1.4210855e-14, z: 2.6645353e-15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 6633355367928859610} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} diff --git a/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta b/Assets/Prefabs/CharacterPrefabs/Character.prefab.meta old mode 100755 new mode 100644 similarity index 62% rename from Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta rename to Assets/Prefabs/CharacterPrefabs/Character.prefab.meta index db7171f..4a4509a --- a/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta +++ b/Assets/Prefabs/CharacterPrefabs/Character.prefab.meta @@ -1,6 +1,6 @@ fileFormatVersion: 2 -guid: 249f8a9af2b0bc041a08a0009a6fdf44 -TextScriptImporter: +guid: 0d0b10717aa98fe44bcf651ad0901a89 +PrefabImporter: externalObjects: {} userData: assetBundleName: diff --git a/Assets/Prefabs/Player.prefab b/Assets/Prefabs/CharacterPrefabs/Player.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/Player.prefab rename to Assets/Prefabs/CharacterPrefabs/Player.prefab diff --git a/Assets/Prefabs/Player.prefab.meta b/Assets/Prefabs/CharacterPrefabs/Player.prefab.meta similarity index 100% rename from Assets/Prefabs/Player.prefab.meta rename to Assets/Prefabs/CharacterPrefabs/Player.prefab.meta diff --git a/Assets/Prefabs/EntityPrefabs.meta b/Assets/Prefabs/EntityPrefabs.meta new file mode 100644 index 0000000..a069136 --- /dev/null +++ b/Assets/Prefabs/EntityPrefabs.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 61923742bbdb59f45816f6d760980fc4 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Prefabs/Flag zone.prefab b/Assets/Prefabs/EntityPrefabs/Flag zone.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/Flag zone.prefab rename to Assets/Prefabs/EntityPrefabs/Flag zone.prefab diff --git a/Assets/Prefabs/Flag zone.prefab.meta b/Assets/Prefabs/EntityPrefabs/Flag zone.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/Flag zone.prefab.meta rename to Assets/Prefabs/EntityPrefabs/Flag zone.prefab.meta diff --git a/Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab b/Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab new file mode 100644 index 0000000..afd2291 --- /dev/null +++ b/Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab @@ -0,0 +1,49 @@ +%YAML 1.1 +%TAG !u! 
tag:unity3d.com,2011: +--- !u!1 &7378066357312323927 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 7378066357312323926} + - component: {fileID: 7378066357312323925} + m_Layer: 0 + m_Name: navPointPrefab + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &7378066357312323926 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7378066357312323927} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 0} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &7378066357312323925 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7378066357312323927} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] diff --git a/Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab.meta b/Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab.meta new file mode 100644 index 0000000..f826d76 --- /dev/null +++ b/Assets/Prefabs/EntityPrefabs/navPointPrefab.prefab.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: f58a14c8ba97dfa4f9f4103bbee852d4 +PrefabImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Prefabs/PickupPrefabs.meta b/Assets/Prefabs/PickupPrefabs.meta new file mode 100644 index 0000000..b406ea9 --- /dev/null +++ b/Assets/Prefabs/PickupPrefabs.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 132a5e983df3eaa41bbb1e9bda3dd97e +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Prefabs/ammo.prefab b/Assets/Prefabs/PickupPrefabs/ammo.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/ammo.prefab rename to Assets/Prefabs/PickupPrefabs/ammo.prefab diff --git a/Assets/Prefabs/ammo.prefab.meta b/Assets/Prefabs/PickupPrefabs/ammo.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/ammo.prefab.meta rename to Assets/Prefabs/PickupPrefabs/ammo.prefab.meta diff --git a/Assets/Prefabs/armour.prefab b/Assets/Prefabs/PickupPrefabs/armour.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/armour.prefab rename to Assets/Prefabs/PickupPrefabs/armour.prefab diff --git a/Assets/Prefabs/armour.prefab.meta b/Assets/Prefabs/PickupPrefabs/armour.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/armour.prefab.meta rename to Assets/Prefabs/PickupPrefabs/armour.prefab.meta diff --git a/Assets/Prefabs/health.prefab b/Assets/Prefabs/PickupPrefabs/health.prefab old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/health.prefab rename to Assets/Prefabs/PickupPrefabs/health.prefab diff --git a/Assets/Prefabs/health.prefab.meta b/Assets/Prefabs/PickupPrefabs/health.prefab.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Prefabs/health.prefab.meta rename to 
Assets/Prefabs/PickupPrefabs/health.prefab.meta diff --git a/Assets/Resources.meta b/Assets/Resources.meta new file mode 100644 index 0000000..f303094 --- /dev/null +++ b/Assets/Resources.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 61178e06e35bbb84daabda8d15a50d45 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Resources/BillingMode.json b/Assets/Resources/BillingMode.json new file mode 100644 index 0000000..6f4bfb7 --- /dev/null +++ b/Assets/Resources/BillingMode.json @@ -0,0 +1 @@ +{"androidStore":"GooglePlay"} \ No newline at end of file diff --git a/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta b/Assets/Resources/BillingMode.json.meta old mode 100755 new mode 100644 similarity index 75% rename from Assets/ML-Agents/Timers/DemoScene01_timers.json.meta rename to Assets/Resources/BillingMode.json.meta index fe18e4a..255ee91 --- a/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta +++ b/Assets/Resources/BillingMode.json.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 1b328c4e26e7a994e8a42f269ca7419b +guid: 01004bca71fbe914d9e15e843ac4aca0 TextScriptImporter: externalObjects: {} userData: diff --git a/Assets/Scenes/AnimationTestScene.unity b/Assets/Scenes/AnimationTestScene.unity new file mode 100644 index 0000000..b3ec676 --- /dev/null +++ b/Assets/Scenes/AnimationTestScene.unity @@ -0,0 +1,1060 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!29 &1 +OcclusionCullingSettings: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_OcclusionBakeSettings: + smallestOccluder: 5 + smallestHole: 0.25 + backfaceThreshold: 100 + m_SceneGUID: 00000000000000000000000000000000 + m_OcclusionCullingData: {fileID: 0} +--- !u!104 &2 +RenderSettings: + m_ObjectHideFlags: 0 + serializedVersion: 9 + m_Fog: 0 + m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} + m_FogMode: 3 + m_FogDensity: 0.01 + m_LinearFogStart: 0 + m_LinearFogEnd: 300 + m_AmbientSkyColor: {r: 0.212, g: 0.227, b: 0.259, a: 1} + m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} + m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} + m_AmbientIntensity: 1 + m_AmbientMode: 0 + m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} + m_SkyboxMaterial: {fileID: 10304, guid: 0000000000000000f000000000000000, type: 0} + m_HaloStrength: 0.5 + m_FlareStrength: 1 + m_FlareFadeSpeed: 3 + m_HaloTexture: {fileID: 0} + m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} + m_DefaultReflectionMode: 0 + m_DefaultReflectionResolution: 128 + m_ReflectionBounces: 1 + m_ReflectionIntensity: 1 + m_CustomReflection: {fileID: 0} + m_Sun: {fileID: 0} + m_IndirectSpecularColor: {r: 0.44657898, g: 0.4964133, b: 0.5748178, a: 1} + m_UseRadianceAmbientProbe: 0 +--- !u!157 &3 +LightmapSettings: + m_ObjectHideFlags: 0 + serializedVersion: 11 + m_GIWorkflowMode: 1 + m_GISettings: + serializedVersion: 2 + m_BounceScale: 1 + m_IndirectOutputScale: 1 + m_AlbedoBoost: 1 + m_EnvironmentLightingMode: 0 + m_EnableBakedLightmaps: 1 + m_EnableRealtimeLightmaps: 0 + m_LightmapEditorSettings: + serializedVersion: 12 + m_Resolution: 2 + m_BakeResolution: 40 + m_AtlasSize: 1024 + m_AO: 0 + m_AOMaxDistance: 1 + m_CompAOExponent: 1 + m_CompAOExponentDirect: 0 + m_ExtractAmbientOcclusion: 0 + m_Padding: 2 + m_LightmapParameters: {fileID: 0} + m_LightmapsBakeMode: 1 + m_TextureCompression: 1 + m_FinalGather: 0 + m_FinalGatherFiltering: 1 + m_FinalGatherRayCount: 256 + m_ReflectionCompression: 2 + m_MixedBakeMode: 2 + m_BakeBackend: 1 + 
m_PVRSampling: 1 + m_PVRDirectSampleCount: 32 + m_PVRSampleCount: 512 + m_PVRBounces: 2 + m_PVREnvironmentSampleCount: 256 + m_PVREnvironmentReferencePointCount: 2048 + m_PVRFilteringMode: 1 + m_PVRDenoiserTypeDirect: 1 + m_PVRDenoiserTypeIndirect: 1 + m_PVRDenoiserTypeAO: 1 + m_PVRFilterTypeDirect: 0 + m_PVRFilterTypeIndirect: 0 + m_PVRFilterTypeAO: 0 + m_PVREnvironmentMIS: 1 + m_PVRCulling: 1 + m_PVRFilteringGaussRadiusDirect: 1 + m_PVRFilteringGaussRadiusIndirect: 5 + m_PVRFilteringGaussRadiusAO: 2 + m_PVRFilteringAtrousPositionSigmaDirect: 0.5 + m_PVRFilteringAtrousPositionSigmaIndirect: 2 + m_PVRFilteringAtrousPositionSigmaAO: 1 + m_ExportTrainingData: 0 + m_TrainingDataDestination: TrainingData + m_LightProbeSampleCountMultiplier: 4 + m_LightingDataAsset: {fileID: 0} + m_UseShadowmask: 1 +--- !u!196 &4 +NavMeshSettings: + serializedVersion: 2 + m_ObjectHideFlags: 0 + m_BuildSettings: + serializedVersion: 2 + agentTypeID: 0 + agentRadius: 0.5 + agentHeight: 2 + agentSlope: 45 + agentClimb: 0.4 + ledgeDropHeight: 0 + maxJumpAcrossDistance: 0 + minRegionArea: 2 + manualCellSize: 0 + cellSize: 0.16666667 + manualTileSize: 0 + tileSize: 256 + accuratePlacement: 0 + debug: + m_Flags: 0 + m_NavMeshData: {fileID: 0} +--- !u!1 &93904468 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 93904472} + - component: {fileID: 93904471} + - component: {fileID: 93904470} + - component: {fileID: 93904469} + m_Layer: 0 + m_Name: Plane (1) + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!64 &93904469 +MeshCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 93904468} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 4 + m_Convex: 0 + m_CookingOptions: 30 + m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} +--- !u!23 &93904470 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 93904468} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &93904471 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 93904468} + m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} +--- !u!4 &93904472 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + 
m_GameObject: {fileID: 93904468} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: -0.53, z: 10.04} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 0} + m_RootOrder: 4 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &738827739 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 738827741} + - component: {fileID: 738827740} + m_Layer: 0 + m_Name: Camera Holder + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!114 &738827740 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 738827739} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 3d606407023147d7b4d530a9593e9697, type: 3} + m_Name: + m_EditorClassIdentifier: + targetTransform: {fileID: 7072444062111317995} + cameraTransform: {fileID: 1403458766} + cameraPivotTransform: {fileID: 1054021701} + minimumPivot: -35 + maximumPivot: 35 +--- !u!4 &738827741 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 738827739} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 1054021701} + m_Father: {fileID: 0} + m_RootOrder: 3 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1054021700 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1054021701} + m_Layer: 0 + m_Name: Camera Pivot + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1054021701 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1054021700} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 1403458766} + m_Father: {fileID: 738827741} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1225778554 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1225778558} + - component: {fileID: 1225778557} + - component: {fileID: 1225778556} + - component: {fileID: 1225778555} + m_Layer: 0 + m_Name: Plane + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!64 &1225778555 +MeshCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1225778554} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 4 + m_Convex: 0 + m_CookingOptions: 30 + m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} +--- !u!23 &1225778556 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1225778554} + 
m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1225778557 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1225778554} + m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} +--- !u!4 &1225778558 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1225778554} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 0} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1403458763 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1403458766} + - component: {fileID: 1403458764} + m_Layer: 0 + m_Name: Main Camera + m_TagString: MainCamera + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!20 &1403458764 +Camera: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1403458763} + m_Enabled: 1 + serializedVersion: 2 + m_ClearFlags: 1 + m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} + m_projectionMatrixMode: 1 + m_GateFitMode: 2 + m_FOVAxisMode: 0 + m_SensorSize: {x: 36, y: 24} + m_LensShift: {x: 0, y: 0} + m_FocalLength: 50 + m_NormalizedViewPortRect: + serializedVersion: 2 + x: 0 + y: 0 + width: 1 + height: 1 + near clip plane: 0.3 + far clip plane: 1000 + field of view: 26.991467 + orthographic: 0 + orthographic size: 5 + m_Depth: 0 + m_CullingMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingPath: -1 + m_TargetTexture: {fileID: 0} + m_TargetDisplay: 0 + m_TargetEye: 3 + m_HDR: 1 + m_AllowMSAA: 1 + m_AllowDynamicResolution: 0 + m_ForceIntoRT: 0 + m_OcclusionCulling: 1 + m_StereoConvergence: 10 + m_StereoSeparation: 0.022 +--- !u!4 &1403458766 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1403458763} + m_LocalRotation: {x: 0.08193846, y: 0, z: 0, w: 0.9966374} + m_LocalPosition: {x: 0, y: 3.39, z: -10.29} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 1054021701} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 9.4, y: 0, z: 0} +--- !u!1 &1755358664 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + 
m_Component: + - component: {fileID: 1755358666} + - component: {fileID: 1755358665} + m_Layer: 0 + m_Name: Directional Light + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!108 &1755358665 +Light: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1755358664} + m_Enabled: 1 + serializedVersion: 10 + m_Type: 1 + m_Shape: 0 + m_Color: {r: 1, g: 0.95686275, b: 0.8392157, a: 1} + m_Intensity: 1 + m_Range: 10 + m_SpotAngle: 30 + m_InnerSpotAngle: 21.80208 + m_CookieSize: 10 + m_Shadows: + m_Type: 2 + m_Resolution: -1 + m_CustomResolution: -1 + m_Strength: 1 + m_Bias: 0.05 + m_NormalBias: 0.4 + m_NearPlane: 0.2 + m_CullingMatrixOverride: + e00: 1 + e01: 0 + e02: 0 + e03: 0 + e10: 0 + e11: 1 + e12: 0 + e13: 0 + e20: 0 + e21: 0 + e22: 1 + e23: 0 + e30: 0 + e31: 0 + e32: 0 + e33: 1 + m_UseCullingMatrixOverride: 0 + m_Cookie: {fileID: 0} + m_DrawHalo: 0 + m_Flare: {fileID: 0} + m_RenderMode: 0 + m_CullingMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingLayerMask: 1 + m_Lightmapping: 4 + m_LightShadowCasterMode: 0 + m_AreaSize: {x: 1, y: 1} + m_BounceIntensity: 1 + m_ColorTemperature: 6570 + m_UseColorTemperature: 0 + m_BoundingSphereOverride: {x: 0, y: 0, z: 0, w: 0} + m_UseBoundingSphereOverride: 0 + m_ShadowRadius: 0 + m_ShadowAngle: 0 +--- !u!4 &1755358666 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1755358664} + m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261} + m_LocalPosition: {x: 0, y: 3, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 0} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0} +--- !u!1001 &4478891302374877197 +PrefabInstance: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 6633355367928763904, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763906, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763908, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763910, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763912, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763914, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763916, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763918, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763920, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763922, guid: 
0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763924, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763926, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763928, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763930, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763932, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928763934, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764320, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764322, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764324, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764326, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764328, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764330, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764332, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764334, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764336, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764338, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764340, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764342, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764344, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764346, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764348, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764350, guid: 0d0b10717aa98fe44bcf651ad0901a89, 
+ type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764352, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764354, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764356, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764358, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764360, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764362, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764364, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764366, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764368, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764370, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764372, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764374, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764376, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764378, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764380, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764382, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764384, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764386, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764388, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Name + value: Character + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764388, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764390, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764392, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: 
m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764394, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764396, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764398, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764400, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764402, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764404, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764406, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764408, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764410, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764412, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928764414, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_Layer + value: 10 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_RootOrder + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalPosition.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalPosition.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalRotation.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalEulerAnglesHint.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalEulerAnglesHint.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 6633355367928859588, 
guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + propertyPath: m_LocalEulerAnglesHint.z + value: 0 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_SourcePrefab: {fileID: 100100000, guid: 0d0b10717aa98fe44bcf651ad0901a89, type: 3} +--- !u!1 &7072444062111317993 stripped +GameObject: + m_CorrespondingSourceObject: {fileID: 6633355367928764388, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + m_PrefabInstance: {fileID: 4478891302374877197} + m_PrefabAsset: {fileID: 0} +--- !u!114 &7072444062111317994 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444062111317993} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: aeafb7b8074141969e8779cd3d4a9d08, type: 3} + m_Name: + m_EditorClassIdentifier: + myTransform: {fileID: 0} + myAnimatorHandler: {fileID: 0} + myRigidbody: {fileID: 0} + jumpForce: 1800 + normalCamera: {fileID: 0} + movementSpeed: 5 + rotationSpeed: 10 +--- !u!4 &7072444062111317995 stripped +Transform: + m_CorrespondingSourceObject: {fileID: 6633355367928859588, guid: 0d0b10717aa98fe44bcf651ad0901a89, + type: 3} + m_PrefabInstance: {fileID: 4478891302374877197} + m_PrefabAsset: {fileID: 0} +--- !u!114 &7072444062111317996 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444062111317993} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 77c38ddfaba349c590d4a6583f7efac4, type: 3} + m_Name: + m_EditorClassIdentifier: + horizontal: 0 + vertical: 0 + moveAmount: 0 + mouseX: 0 + mouseY: 0 + crouchPressed: 0 + jumpPressed: 0 + firePressed: 0 +--- !u!54 &7072444062111317998 +Rigidbody: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444062111317993} + serializedVersion: 2 + m_Mass: 1 + m_Drag: 8 + m_AngularDrag: 20 + m_UseGravity: 1 + m_IsKinematic: 0 + m_Interpolate: 0 + m_Constraints: 80 + m_CollisionDetection: 3 +--- !u!136 &7072444062111318000 +CapsuleCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444062111317993} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + m_Radius: 0.5 + m_Height: 2.1 + m_Direction: 1 + m_Center: {x: 0, y: 1.02, z: 0} +--- !u!95 &7072444062111318002 +Animator: + serializedVersion: 3 + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444062111317993} + m_Enabled: 1 + m_Avatar: {fileID: 9000000, guid: 860793eb3324391468f1c120a75ec049, type: 3} + m_Controller: {fileID: 9100000, guid: 3ebf60422b6cb1c498ee4cf238072b43, type: 2} + m_CullingMode: 1 + m_UpdateMode: 0 + m_ApplyRootMotion: 0 + m_LinearVelocityBlending: 0 + m_WarningMessage: + m_HasTransformHierarchy: 1 + m_AllowConstantClipSamplingOptimization: 1 + m_KeepAnimatorControllerStateOnDisable: 0 +--- !u!114 &7072444062111318004 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 7072444062111317993} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: f9c5f555eb7641518e39a97abe893cd8, type: 3} + m_Name: + m_EditorClassIdentifier: 
+ anim: {fileID: 7072444062111318002} + canRotate: 1 diff --git a/Assets/Scenes/dont touch me plz.unity.meta b/Assets/Scenes/AnimationTestScene.unity.meta old mode 100755 new mode 100644 similarity index 74% rename from Assets/Scenes/dont touch me plz.unity.meta rename to Assets/Scenes/AnimationTestScene.unity.meta index b47f91a..2d1f612 --- a/Assets/Scenes/dont touch me plz.unity.meta +++ b/Assets/Scenes/AnimationTestScene.unity.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: acbb7f09bd8d02a46bf0793e1aafd61f +guid: b107ebb27f1b8dd40b1e67a2d44d1662 DefaultImporter: externalObjects: {} userData: diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity index 1b0de26..b053540 100644 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity +++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity @@ -1724,102 +1724,24 @@ PrefabInstance: objectReference: {fileID: 0} m_RemovedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 00815eb34d8b87f42b0215bd3dfd7463, type: 3} ---- !u!1 &1345085340 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 1345085341} - - component: {fileID: 1345085344} - - component: {fileID: 1345085343} - - component: {fileID: 1345085342} - m_Layer: 0 - m_Name: navpoint1 - m_TagString: Point - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1345085341 +--- !u!4 &1345085341 stripped Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} + m_CorrespondingSourceObject: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + m_PrefabInstance: {fileID: 7378066356508352203} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1345085340} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 47.446796, y: -5.782543, z: -25.400002} - m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} - m_Children: [] - m_Father: {fileID: 2060099472} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &1345085342 +--- !u!114 &1345085342 stripped MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} + m_CorrespondingSourceObject: {fileID: 7378066357312323925, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + m_PrefabInstance: {fileID: 7378066356508352203} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1345085340} + m_GameObject: {fileID: 0} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - navType: 1 - PointId: 0 - DeathAttr: 0 - EnemiesSeen: [] ---- !u!23 &1345085343 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1345085340} - m_Enabled: 0 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - 
m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &1345085344 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1345085340} - m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1488699521 GameObject: m_ObjectHideFlags: 0 @@ -2185,6 +2107,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 593162665e908cf4ea4429f8385dc627, type: 3} m_Name: m_EditorClassIdentifier: + _navPoints: [] --- !u!114 &1858987086 MonoBehaviour: m_ObjectHideFlags: 0 @@ -2826,3 +2749,72 @@ PrefabInstance: objectReference: {fileID: 0} m_RemovedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} +--- !u!1001 &7378066356508352203 +PrefabInstance: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 2060099472} + m_Modifications: + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_RootOrder + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalPosition.x + value: 47.446796 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalPosition.y + value: -5.782543 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalPosition.z + value: -25.400002 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalRotation.x + value: -0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalRotation.y + value: -0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalRotation.z + value: -0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalEulerAnglesHint.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalEulerAnglesHint.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323926, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_LocalEulerAnglesHint.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 7378066357312323927, guid: f58a14c8ba97dfa4f9f4103bbee852d4, + type: 3} + propertyPath: m_Name + value: navpoint1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_SourcePrefab: {fileID: 100100000, guid: f58a14c8ba97dfa4f9f4103bbee852d4, type: 3} diff --git a/Assets/Scenes/First try.unity b/Assets/Scenes/Map#1.unity old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scenes/First 
try.unity rename to Assets/Scenes/Map#1.unity diff --git a/Assets/Scenes/First try.unity.meta b/Assets/Scenes/Map#1.unity.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scenes/First try.unity.meta rename to Assets/Scenes/Map#1.unity.meta diff --git a/Assets/Scenes/dont touch me plz.unity b/Assets/Scenes/dont touch me plz.unity deleted file mode 100755 index 3f3b10d..0000000 --- a/Assets/Scenes/dont touch me plz.unity +++ /dev/null @@ -1,460 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 9 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.212, g: 0.227, b: 0.259, a: 1} - m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} - m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 0 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 10304, guid: 0000000000000000f000000000000000, type: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0.44657826, g: 0.49641263, b: 0.57481676, a: 1} - m_UseRadianceAmbientProbe: 0 ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - m_GIWorkflowMode: 1 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 0 - m_LightmapEditorSettings: - serializedVersion: 12 - m_Resolution: 2 - m_BakeResolution: 40 - m_AtlasSize: 1024 - m_AO: 0 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_ExtractAmbientOcclusion: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 - m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - m_BakeBackend: 1 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 512 - m_PVRBounces: 2 - m_PVREnvironmentSampleCount: 256 - m_PVREnvironmentReferencePointCount: 2048 - m_PVRFilteringMode: 1 - m_PVRDenoiserTypeDirect: 1 - m_PVRDenoiserTypeIndirect: 1 - m_PVRDenoiserTypeAO: 1 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVREnvironmentMIS: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ExportTrainingData: 0 - m_TrainingDataDestination: TrainingData - m_LightProbeSampleCountMultiplier: 4 - m_LightingDataAsset: {fileID: 0} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 
- agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1 &254804098 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 254804102} - - component: {fileID: 254804101} - - component: {fileID: 254804100} - - component: {fileID: 254804099} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!65 &254804099 -BoxCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 254804098} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!23 &254804100 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 254804098} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: d911963c0a55d5243b20ebed4dddc9fd, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &254804101 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 254804098} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!4 &254804102 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 254804098} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 49.153797, y: 9.146889, z: 40.07178} - m_LocalScale: {x: 100, y: 1, z: 100} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &676942498 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 676942500} - - component: {fileID: 676942499} - m_Layer: 0 - m_Name: Directional Light - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!108 &676942499 -Light: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 676942498} - m_Enabled: 1 - serializedVersion: 10 - m_Type: 1 - m_Shape: 0 - m_Color: {r: 1, g: 0.95686275, b: 
0.8392157, a: 1} - m_Intensity: 1 - m_Range: 10 - m_SpotAngle: 30 - m_InnerSpotAngle: 21.80208 - m_CookieSize: 10 - m_Shadows: - m_Type: 2 - m_Resolution: -1 - m_CustomResolution: -1 - m_Strength: 1 - m_Bias: 0.05 - m_NormalBias: 0.4 - m_NearPlane: 0.2 - m_CullingMatrixOverride: - e00: 1 - e01: 0 - e02: 0 - e03: 0 - e10: 0 - e11: 1 - e12: 0 - e13: 0 - e20: 0 - e21: 0 - e22: 1 - e23: 0 - e30: 0 - e31: 0 - e32: 0 - e33: 1 - m_UseCullingMatrixOverride: 0 - m_Cookie: {fileID: 0} - m_DrawHalo: 0 - m_Flare: {fileID: 0} - m_RenderMode: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingLayerMask: 1 - m_Lightmapping: 4 - m_LightShadowCasterMode: 0 - m_AreaSize: {x: 1, y: 1} - m_BounceIntensity: 1 - m_ColorTemperature: 6570 - m_UseColorTemperature: 0 - m_BoundingSphereOverride: {x: 0, y: 0, z: 0, w: 0} - m_UseBoundingSphereOverride: 0 - m_ShadowRadius: 0 - m_ShadowAngle: 0 ---- !u!4 &676942500 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 676942498} - m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261} - m_LocalPosition: {x: 0, y: 3, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0} ---- !u!1 &1991868003 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 1991868006} - - component: {fileID: 1991868005} - - component: {fileID: 1991868004} - m_Layer: 0 - m_Name: Main Camera - m_TagString: MainCamera - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!81 &1991868004 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1991868003} - m_Enabled: 1 ---- !u!20 &1991868005 -Camera: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1991868003} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: -1 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!4 &1991868006 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1991868003} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 1, z: -10} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &8510909887784509496 -PrefabInstance: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - 
m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_RootOrder - value: 3 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalPosition.x - value: 32.94 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalPosition.y - value: 10.9 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalPosition.z - value: 46.67 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalRotation.w - value: 0.7071068 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalRotation.y - value: 0.7071068 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalEulerAnglesHint.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalEulerAnglesHint.y - value: 90 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480103, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_LocalEulerAnglesHint.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 5245491127989480125, guid: 80f6c1c85e5daed4c96c70205ed5503d, - type: 3} - propertyPath: m_Name - value: Player - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_SourcePrefab: {fileID: 100100000, guid: 80f6c1c85e5daed4c96c70205ed5503d, type: 3} diff --git a/Assets/Scripts/Animators.meta b/Assets/Scripts/Animators.meta new file mode 100644 index 0000000..ad7ca65 --- /dev/null +++ b/Assets/Scripts/Animators.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 0cf97b021cee45eb8f7d402f16955139 +timeCreated: 1652022637 \ No newline at end of file diff --git a/Assets/Scripts/Animators/Kirill Animator.meta b/Assets/Scripts/Animators/Kirill Animator.meta new file mode 100644 index 0000000..6ef1fdf --- /dev/null +++ b/Assets/Scripts/Animators/Kirill Animator.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 97bb456bbc4248378002eefeb52be6c3 +timeCreated: 1652022665 \ No newline at end of file diff --git a/Assets/Scripts/Character/scr_CharacterController.cs b/Assets/Scripts/Animators/Kirill Animator/CustomCharacterController.cs old mode 100755 new mode 100644 similarity index 95% rename from Assets/Scripts/Character/scr_CharacterController.cs rename to Assets/Scripts/Animators/Kirill Animator/CustomCharacterController.cs index e4f847d..9276565 --- a/Assets/Scripts/Character/scr_CharacterController.cs +++ b/Assets/Scripts/Animators/Kirill Animator/CustomCharacterController.cs @@ -1,255 +1,253 @@ -using UnityEngine; - -using static scr_Models; - -public class scr_CharacterController : MonoBehaviour -{ - - private CharacterController characterController; - 
private DefaultInput defaultInput; - private Vector2 input_Movement; - [HideInInspector] - public Vector2 input_View; - - private Vector3 newCameraRotation; - private Vector3 newCharacterRotation; - - [Header("References")] - public Transform cameraHolder; - public Transform feetTransform; - - [Header("Settings")] - public PlayerSettingsModel playerSettings; - - public float ViewClampYMin = -70; - public float ViewClampYMax = 80; - public LayerMask playerMask; - - [Header("Gravity")] - public float gravityAmount; - public float gravityMin; - private float playerGravity; - - public Vector3 jumpingForce; - private Vector3 jumpingForceVelocity; - - [Header("Stance")] - public PlayerStance playerStance; - public float playerStanceSmoothing; - public CharacterStance playerStandStance; - public CharacterStance playerCrouchStance; - public CharacterStance playerProneStance; - private float stanceCheckErrorMargin = 0.05f; - - private float cameraHeight; - private float cameraHeightVelocity; - - private bool isSprinting; - - private Vector3 newMovementSpeed; - private Vector3 newMovementSpeedVelocity; - - [Header("Weapon")] public scr_WeaponController currentWeapon; - private void Awake() - { - defaultInput = new DefaultInput(); - - defaultInput.Character.Movement.performed += e => input_Movement = e.ReadValue<Vector2>(); - defaultInput.Character.View.performed += e => input_View = e.ReadValue<Vector2>(); - defaultInput.Character.Jump.performed += e => Jump(); - - defaultInput.Character.Crouch.performed += e => Crouch(); - defaultInput.Character.Prone.performed += e => Prone(); - - defaultInput.Character.Sprint.performed += e => ToggleSprint(); - defaultInput.Character.SprintReleased.performed += e => StopSprint(); - - defaultInput.Enable(); - - newCameraRotation = cameraHolder.localRotation.eulerAngles; - newCharacterRotation = transform.localRotation.eulerAngles; - characterController = GetComponent<CharacterController>(); - - cameraHeight = cameraHolder.localPosition.y; - - if (currentWeapon) - { - currentWeapon.Initialise(this); - } - - } - - private void Update() - { - CalculateView(); - CalculateMovement(); - CalculateJump(); - CalculateCameraHeight(); - } - - private void CalculateView() - { - newCharacterRotation.y += playerSettings.ViewXSensetivity * (playerSettings.ViewXInverted ? -input_View.x : input_View.x) * Time.deltaTime; - transform.localRotation = Quaternion.Euler(newCharacterRotation); - - newCameraRotation.x += playerSettings.ViewYSensetivity * (playerSettings.ViewYInverted ? 
input_View.y : -input_View.y) * Time.deltaTime; - newCameraRotation.x = Mathf.Clamp(newCameraRotation.x, ViewClampYMin, ViewClampYMax); - - cameraHolder.localRotation = Quaternion.Euler(newCameraRotation); - } - - private void CalculateMovement() - { - if (input_Movement.y <= 0.2f) - { - isSprinting = false; - } - - var verticalSpeed = playerSettings.WalkingForwardSpeed; - var horizontalSpeed = playerSettings.WalkingStrafeSpeed; - - if (isSprinting) - { - verticalSpeed = playerSettings.RunningForwardSpeed; - horizontalSpeed = playerSettings.RunningStrafeSpeed; - } - - // Effectors - if (!characterController.isGrounded) - { - playerSettings.SpeedEffector = playerSettings.FallingSpeedEffector; - } - else if (playerStance == PlayerStance.Crouch) - { - playerSettings.SpeedEffector = playerSettings.CrouchSpeedEffector; - } - else if (playerStance == PlayerStance.Prone) - { - playerSettings.SpeedEffector = playerSettings.ProneSpeedEffector; - } - else - { - playerSettings.SpeedEffector = 1; - } - - verticalSpeed *= playerSettings.SpeedEffector; - horizontalSpeed *= playerSettings.SpeedEffector; - - newMovementSpeed = Vector3.SmoothDamp(newMovementSpeed, - new Vector3(horizontalSpeed * input_Movement.x * Time.deltaTime, - 0, verticalSpeed * input_Movement.y * Time.deltaTime), - ref newMovementSpeedVelocity, characterController.isGrounded ? playerSettings.MovementSmoothing : playerSettings.FallingSmoothing); - - var MovementSpeed = transform.TransformDirection(newMovementSpeed); - - if (playerGravity > gravityMin) - { - playerGravity -= gravityAmount * Time.deltaTime; - } - - if (playerGravity < -0.1f && characterController.isGrounded) - { - playerGravity = -0.1f; - } - - MovementSpeed.y += playerGravity; - MovementSpeed += jumpingForce * Time.deltaTime; - - characterController.Move(MovementSpeed); - } - - private void CalculateJump() - { - jumpingForce = Vector3.SmoothDamp(jumpingForce, Vector3.zero, ref jumpingForceVelocity, playerSettings.JumpingFalloff); - } - - private void CalculateCameraHeight() - { - var stanceHeight = playerStandStance.CameraHeight; - - if (playerStance == PlayerStance.Crouch) - { - stanceHeight = playerCrouchStance.CameraHeight; - } - else if (playerStance == PlayerStance.Prone) - { - stanceHeight = playerProneStance.CameraHeight; - } - - cameraHeight = Mathf.SmoothDamp(cameraHolder.localPosition.y, stanceHeight, ref cameraHeightVelocity, playerStanceSmoothing); - - cameraHolder.localPosition = new Vector3(cameraHolder.localPosition.x, cameraHeight, cameraHolder.localPosition.z); - } - private void Jump() - { - if (!characterController.isGrounded || playerStance == PlayerStance.Prone) - { - return; - } - - if (playerStance == PlayerStance.Crouch) - { - if (StanceCheck(playerStandStance.StanceCollider.height)) - { - return; - } - playerStance = PlayerStance.Stand; - return; - } - - // Jump - jumpingForce = Vector3.up * playerSettings.JumpingHeight; - playerGravity = 0; - } - - private void Crouch() - { - if (playerStance == PlayerStance.Crouch) - { - if (StanceCheck(playerStandStance.StanceCollider.height)) - { - return; - } - playerStance = PlayerStance.Stand; - return; - } - if (StanceCheck(playerCrouchStance.StanceCollider.height)) - { - return; - } - playerStance = PlayerStance.Crouch; - } - - private void Prone() - { - playerStance = PlayerStance.Prone; - } - - private bool StanceCheck(float stanceCheckheight) - { - var start = new Vector3(feetTransform.position.x, feetTransform.position.y + characterController.radius + stanceCheckErrorMargin, 
feetTransform.position.z); - var end = new Vector3(feetTransform.position.x, feetTransform.position.y - characterController.radius - stanceCheckErrorMargin + stanceCheckheight, feetTransform.position.z); - - - return Physics.CheckCapsule(start, end, characterController.radius, playerMask); - } - - private void ToggleSprint() - { - if (input_Movement.y <= 0.2f) - { - isSprinting = false; - return; - } - isSprinting = !isSprinting; - } - - private void StopSprint() - { - if (playerSettings.SprintingHold) - { - isSprinting = false; - } - } - -} +using UnityEngine; + +public class scr_CharacterController : MonoBehaviour +{ + private CharacterController characterController; + private DefaultInput defaultInput; + private Vector2 input_Movement; + [HideInInspector] + public Vector2 input_View; + + private Vector3 newCameraRotation; + private Vector3 newCharacterRotation; + + [Header("References")] + public Transform cameraHolder; + public Transform feetTransform; + + [Header("Settings")] + public PlayerSettingsModel playerSettings; + + public float ViewClampYMin = -70; + public float ViewClampYMax = 80; + public LayerMask playerMask; + + [Header("Gravity")] + public float gravityAmount; + public float gravityMin; + private float playerGravity; + + public Vector3 jumpingForce; + private Vector3 jumpingForceVelocity; + + [Header("Stance")] + public PlayerStance playerStance; + public float playerStanceSmoothing; + public CharacterStance playerStandStance; + public CharacterStance playerCrouchStance; + public CharacterStance playerProneStance; + private float stanceCheckErrorMargin = 0.05f; + + private float cameraHeight; + private float cameraHeightVelocity; + + private bool isSprinting; + + private Vector3 newMovementSpeed; + private Vector3 newMovementSpeedVelocity; + + [Header("Weapon")] + public scr_WeaponController currentWeapon; + private void Awake() + { + defaultInput = new DefaultInput(); + + defaultInput.Character.Movement.performed += e => input_Movement = e.ReadValue<Vector2>(); + defaultInput.Character.View.performed += e => input_View = e.ReadValue<Vector2>(); + defaultInput.Character.Jump.performed += e => Jump(); + + defaultInput.Character.Crouch.performed += e => Crouch(); + defaultInput.Character.Prone.performed += e => Prone(); + + defaultInput.Character.Sprint.performed += e => ToggleSprint(); + defaultInput.Character.SprintReleased.performed += e => StopSprint(); + + defaultInput.Enable(); + + newCameraRotation = cameraHolder.localRotation.eulerAngles; + newCharacterRotation = transform.localRotation.eulerAngles; + characterController = GetComponent<CharacterController>(); + + cameraHeight = cameraHolder.localPosition.y; + + if (currentWeapon) + { + currentWeapon.Initialise(this); + } + + } + + private void Update() + { + CalculateView(); + CalculateMovement(); + CalculateJump(); + CalculateCameraHeight(); + } + + private void CalculateView() + { + newCharacterRotation.y += playerSettings.ViewXSensetivity * (playerSettings.ViewXInverted ? -input_View.x : input_View.x) * Time.deltaTime; + transform.localRotation = Quaternion.Euler(newCharacterRotation); + + newCameraRotation.x += playerSettings.ViewYSensetivity * (playerSettings.ViewYInverted ? 
input_View.y : -input_View.y) * Time.deltaTime; + newCameraRotation.x = Mathf.Clamp(newCameraRotation.x, ViewClampYMin, ViewClampYMax); + + cameraHolder.localRotation = Quaternion.Euler(newCameraRotation); + } + + private void CalculateMovement() + { + if (input_Movement.y <= 0.2f) + { + isSprinting = false; + } + + var verticalSpeed = playerSettings.WalkingForwardSpeed; + var horizontalSpeed = playerSettings.WalkingStrafeSpeed; + + if (isSprinting) + { + verticalSpeed = playerSettings.RunningForwardSpeed; + horizontalSpeed = playerSettings.RunningStrafeSpeed; + } + + // Effectors + if (!characterController.isGrounded) + { + playerSettings.SpeedEffector = playerSettings.FallingSpeedEffector; + } + else if (playerStance == PlayerStance.Crouch) + { + playerSettings.SpeedEffector = playerSettings.CrouchSpeedEffector; + } + else if (playerStance == PlayerStance.Prone) + { + playerSettings.SpeedEffector = playerSettings.ProneSpeedEffector; + } + else + { + playerSettings.SpeedEffector = 1; + } + + verticalSpeed *= playerSettings.SpeedEffector; + horizontalSpeed *= playerSettings.SpeedEffector; + + newMovementSpeed = Vector3.SmoothDamp(newMovementSpeed, + new Vector3(horizontalSpeed * input_Movement.x * Time.deltaTime, + 0, verticalSpeed * input_Movement.y * Time.deltaTime), + ref newMovementSpeedVelocity, characterController.isGrounded ? playerSettings.MovementSmoothing : playerSettings.FallingSmoothing); + + var MovementSpeed = transform.TransformDirection(newMovementSpeed); + + if (playerGravity > gravityMin) + { + playerGravity -= gravityAmount * Time.deltaTime; + } + + if (playerGravity < -0.1f && characterController.isGrounded) + { + playerGravity = -0.1f; + } + + MovementSpeed.y += playerGravity; + MovementSpeed += jumpingForce * Time.deltaTime; + + characterController.Move(MovementSpeed); + } + + private void CalculateJump() + { + jumpingForce = Vector3.SmoothDamp(jumpingForce, Vector3.zero, ref jumpingForceVelocity, playerSettings.JumpingFalloff); + } + + private void CalculateCameraHeight() + { + var stanceHeight = playerStandStance.CameraHeight; + + if (playerStance == PlayerStance.Crouch) + { + stanceHeight = playerCrouchStance.CameraHeight; + } + else if (playerStance == PlayerStance.Prone) + { + stanceHeight = playerProneStance.CameraHeight; + } + + cameraHeight = Mathf.SmoothDamp(cameraHolder.localPosition.y, stanceHeight, ref cameraHeightVelocity, playerStanceSmoothing); + + cameraHolder.localPosition = new Vector3(cameraHolder.localPosition.x, cameraHeight, cameraHolder.localPosition.z); + } + private void Jump() + { + if (!characterController.isGrounded || playerStance == PlayerStance.Prone) + { + return; + } + + if (playerStance == PlayerStance.Crouch) + { + if (StanceCheck(playerStandStance.StanceCollider.height)) + { + return; + } + playerStance = PlayerStance.Stand; + return; + } + + // Jump + jumpingForce = Vector3.up * playerSettings.JumpingHeight; + playerGravity = 0; + } + + private void Crouch() + { + if (playerStance == PlayerStance.Crouch) + { + if (StanceCheck(playerStandStance.StanceCollider.height)) + { + return; + } + playerStance = PlayerStance.Stand; + return; + } + if (StanceCheck(playerCrouchStance.StanceCollider.height)) + { + return; + } + playerStance = PlayerStance.Crouch; + } + + private void Prone() + { + playerStance = PlayerStance.Prone; + } + + private bool StanceCheck(float stanceCheckheight) + { + var start = new Vector3(feetTransform.position.x, feetTransform.position.y + characterController.radius + stanceCheckErrorMargin, 
feetTransform.position.z); + var end = new Vector3(feetTransform.position.x, feetTransform.position.y - characterController.radius - stanceCheckErrorMargin + stanceCheckheight, feetTransform.position.z); + + + return Physics.CheckCapsule(start, end, characterController.radius, playerMask); + } + + private void ToggleSprint() + { + if (input_Movement.y <= 0.2f) + { + isSprinting = false; + return; + } + isSprinting = !isSprinting; + } + + private void StopSprint() + { + if (playerSettings.SprintingHold) + { + isSprinting = false; + } + } + +} diff --git a/Assets/Scripts/Character/scr_CharacterController.cs.meta b/Assets/Scripts/Animators/Kirill Animator/CustomCharacterController.cs.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scripts/Character/scr_CharacterController.cs.meta rename to Assets/Scripts/Animators/Kirill Animator/CustomCharacterController.cs.meta diff --git a/Assets/Scripts/Animators/Kirill Animator/Models.cs b/Assets/Scripts/Animators/Kirill Animator/Models.cs new file mode 100644 index 0000000..5e4ec9d --- /dev/null +++ b/Assets/Scripts/Animators/Kirill Animator/Models.cs @@ -0,0 +1,67 @@ +using System; +using UnityEngine; + + +public enum PlayerStance +{ + Stand, + Crouch, + Prone +} + +[Serializable] +public class PlayerSettingsModel +{ + [Header("View Settings")] + public float ViewXSensetivity; + public float ViewYSensetivity; + + public bool ViewXInverted; + public bool ViewYInverted; + + [Header("Movement Settings")] + public bool SprintingHold; + public float MovementSmoothing; + + [Header("Movement - Running")] + public float RunningForwardSpeed; + public float RunningStrafeSpeed; + + [Header("Movement - Walking")] + public float WalkingForwardSpeed; + public float WalkingBackwardSpeed; + public float WalkingStrafeSpeed; + + [Header("Jumping")] + public float JumpingHeight; + public float JumpingFalloff; + public float FallingSmoothing; + + [Header("Speed Effectors")] + public float SpeedEffector = 1; + public float CrouchSpeedEffector; + public float ProneSpeedEffector; + public float FallingSpeedEffector; +} + + +[Serializable] +public class CharacterStance +{ + public float CameraHeight; + public CapsuleCollider StanceCollider; +} + + +[Serializable] +public class WeaponSettingsModel +{ + [Header("Sway")] + public float SwayAmount; + public bool SwayYInverted; + public bool SwayXInverted; + public float SwaySmoothing; + public float SwayResetSmoothing; + public float SwayClampX; + public float SwayClampY; +} \ No newline at end of file diff --git a/Assets/Scripts/Character/scr_Models.cs.meta b/Assets/Scripts/Animators/Kirill Animator/Models.cs.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scripts/Character/scr_Models.cs.meta rename to Assets/Scripts/Animators/Kirill Animator/Models.cs.meta diff --git a/Assets/Scripts/Animators/Leonid Animator.meta b/Assets/Scripts/Animators/Leonid Animator.meta new file mode 100644 index 0000000..e6c9ea6 --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 2cc90c830ef641a1a18e8e21dc97dec0 +timeCreated: 1652022681 \ No newline at end of file diff --git a/Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs b/Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs new file mode 100644 index 0000000..6821141 --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs @@ -0,0 +1,94 @@ +using Unity.Mathematics; +using UnityEngine; + +namespace Animators.Leonid_Animator +{ + public class 
AnimatorHandler : MonoBehaviour + { + public Animator anim; + public bool canRotate; + + private int _horizontal; + private int _vertical; + private bool _isCrouching = false; + private bool _isJumping; + + private int _crouch; + private int _jump; + private int _fired; + + public void Initialize() + { + anim = GetComponent<Animator>(); + _vertical = Animator.StringToHash(nameof(_vertical)); + _horizontal = Animator.StringToHash(nameof(_horizontal)); + _crouch = Animator.StringToHash(nameof(_crouch)); + _jump = Animator.StringToHash(nameof(_jump)); + _fired = Animator.StringToHash(nameof(_fired)); + } + + public void UpdateAnimatorValues(float verticalMovement, float horizontalMovement, + bool pressedJumped, bool pressedCrouching, bool firePressed) + { + #region Vertical Movement + + var vertical = 0f; + if (verticalMovement > 0 && verticalMovement < 0.55) + vertical = 0.5f; + else if (verticalMovement > 0.55) + vertical = 1; + else if (verticalMovement < 0 && verticalMovement > -0.55) + { + vertical = -0.5f; + } + else if (verticalMovement < -0.55) + { + vertical = -1; + } + else + { + vertical = 0; + } + #endregion + + #region Horizontal Movement + + var horizontal = 0f; + if (horizontalMovement > 0 && horizontalMovement < 0.55) + horizontal = 0.5f; + else if (horizontalMovement > 0.55) + horizontal = 1; + else if (horizontalMovement < 0 && horizontalMovement > -0.55) + { + horizontal = -0.5f; + } + else if (horizontalMovement < -0.55) + { + horizontal = -1; + } + else + { + horizontal = 0; + } + #endregion + + anim.SetFloat(_horizontal, horizontal, 0.1f, Time.deltaTime); + anim.SetFloat(_vertical, vertical, 0.1f, Time.deltaTime); + + if (pressedCrouching == true) + { + _isCrouching = !_isCrouching; + if (_isCrouching == true) + transform.Rotate(Vector3.up, 45); + else + { + transform.Rotate(Vector3.up, -45); + } + anim.SetBool(_crouch, _isCrouching); + } + + anim.SetBool(_jump, pressedJumped); + anim.SetBool(_fired, firePressed); + } + } +} \ No newline at end of file diff --git a/Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs.meta b/Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs.meta new file mode 100644 index 0000000..a475077 --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/AnimatorHandler.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: f9c5f555eb7641518e39a97abe893cd8 +timeCreated: 1652031215 \ No newline at end of file diff --git a/Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller b/Assets/Scripts/Animators/Leonid Animator/CharacterAnimator.controller old mode 100755 new mode 100644 similarity index 56% rename from Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller rename to Assets/Scripts/Animators/Leonid Animator/CharacterAnimator.controller index a2a4e1d..12a24c7 --- a/Assets/SciFiWarriorPBRHPPolyart/Animators/SciFiWarrior.controller +++ b/Assets/Scripts/Animators/Leonid Animator/CharacterAnimator.controller @@ -1,358 +1,109 @@ %YAML 1.1 %TAG !u! 
tag:unity3d.com,2011: ---- !u!91 &9100000 -AnimatorController: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: SciFiWarrior - serializedVersion: 5 - m_AnimatorParameters: [] - m_AnimatorLayers: - - serializedVersion: 5 - m_Name: Base Layer - m_StateMachine: {fileID: 1107476691846109156} - m_Mask: {fileID: 0} - m_Motions: [] - m_Behaviours: [] - m_BlendingMode: 0 - m_SyncedLayerIndex: -1 - m_DefaultWeight: 0 - m_IKPass: 0 - m_SyncedLayerAffectsTiming: 0 - m_Controller: {fileID: 9100000} ---- !u!1101 &1101005713632051030 +--- !u!1101 &-8614502741554326989 AnimatorStateTransition: m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_Name: - m_Conditions: [] + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102885244088947746} + m_DstState: {fileID: -5023192667791512651} m_Solo: 0 m_Mute: 0 m_IsExit: 0 serializedVersion: 3 - m_TransitionDuration: 0.25 + m_TransitionDuration: 0.46723264 + m_TransitionOffset: 0.21265899 + m_ExitTime: 0.57224524 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1107 &-8265500127550764659 +AnimatorStateMachine: + serializedVersion: 6 + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Upper + m_ChildStates: + - serializedVersion: 1 + m_State: {fileID: -4600760231423422918} + m_Position: {x: 340, y: 140, z: 0} + - serializedVersion: 1 + m_State: {fileID: 7739110899394721029} + m_Position: {x: 510, y: -10, z: 0} + - serializedVersion: 1 + m_State: {fileID: -4187437994059944167} + m_Position: {x: 400, y: 290, z: 0} + - serializedVersion: 1 + m_State: {fileID: -5023192667791512651} + m_Position: {x: 830, y: 130, z: 0} + m_ChildStateMachines: [] + m_AnyStateTransitions: [] + m_EntryTransitions: [] + m_StateMachineTransitions: {} + m_StateMachineBehaviours: [] + m_AnyStatePosition: {x: 50, y: 20, z: 0} + m_EntryPosition: {x: 50, y: 120, z: 0} + m_ExitPosition: {x: 1150, y: 160, z: 0} + m_ParentStateMachinePosition: {x: 800, y: 20, z: 0} + m_DefaultState: {fileID: -4600760231423422918} +--- !u!1101 &-6757359955429936644 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Jump to Run + m_Conditions: [] + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 8354844821256608690} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.20300466 m_TransitionOffset: 0 - m_ExitTime: 4 + m_ExitTime: 0.7916667 m_HasExitTime: 1 m_HasFixedDuration: 1 m_InterruptionSource: 0 m_OrderedInterruption: 1 m_CanTransitionToSelf: 1 ---- !u!1101 &1101018267375076786 +--- !u!1101 &-6673604382440492192 AnimatorStateTransition: m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_Name: - m_Conditions: [] + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _fired + m_EventTreshold: 0 m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102942199553133880} + m_DstState: {fileID: -4187437994059944167} m_Solo: 0 m_Mute: 0 m_IsExit: 0 serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 + m_TransitionDuration: 1.2815269 + 
m_TransitionOffset: 0.04597427 + m_ExitTime: 0.009130422 + m_HasExitTime: 0 m_HasFixedDuration: 1 m_InterruptionSource: 0 m_OrderedInterruption: 1 m_CanTransitionToSelf: 1 ---- !u!1101 &1101029586256411172 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102525727787806748} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101085741205891168 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102100670134039114} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101213140064716440 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102901032506865742} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 6 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101350354845918052 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102106218193099826} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 6 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101363450263446180 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102243588419055754} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 3 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101376048922213750 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102949299928912516} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101404391616814046 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102379806414260514} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101429387431206348 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102277268960662982} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101567676818271368 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102238716766734490} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101616875765159898 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102642198119822076} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 6 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101624051492446352 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102361151154106906} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 4 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101871786086276220 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102625340767904258} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 10 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1101 &1101995184118313822 -AnimatorStateTransition: - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: - m_Conditions: [] - m_DstStateMachine: {fileID: 0} - m_DstState: {fileID: 1102088155696507906} - m_Solo: 0 - m_Mute: 0 - m_IsExit: 0 - serializedVersion: 3 - 
m_TransitionDuration: 0.25 - m_TransitionOffset: 0 - m_ExitTime: 6 - m_HasExitTime: 1 - m_HasFixedDuration: 1 - m_InterruptionSource: 0 - m_OrderedInterruption: 1 - m_CanTransitionToSelf: 1 ---- !u!1102 &1102088155696507906 +--- !u!1102 &-5914497066343941395 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 @@ -363,7 +114,8 @@ AnimatorState: m_Speed: 1 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101429387431206348} + - {fileID: 5168308916736617153} + - {fileID: 9141976730198879995} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -379,115 +131,32 @@ AnimatorState: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102100670134039114 -AnimatorState: - serializedVersion: 6 +--- !u!1101 &-5341886129914063569 +AnimatorStateTransition: m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: Idle_Shoot_Ar - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101005713632051030} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 1d4365e1541bb6949a273318862b72d3, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102106218193099826 -AnimatorState: - serializedVersion: 6 - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: Shoot_Autoshot_AR - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101871786086276220} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 76a61eccc11250c4da8bd7f1d87fa2fe, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102238716766734490 -AnimatorState: - serializedVersion: 6 - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: Idle_gunMiddle_AR - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101085741205891168} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 107649059ea401b4e9c5c20f21e99a55, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102243588419055754 -AnimatorState: - serializedVersion: 6 - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: WalkFront_Shoot_AR - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101029586256411172} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: e398e3aa9cdfcdc4ebd01a06e4c544ae, type: 3} - m_Tag: - m_SpeedParameter: 
- m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102277268960662982 + m_Name: + m_Conditions: + - m_ConditionMode: 2 + m_ConditionEvent: _jump + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: -4600760231423422918} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.7916667 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1102 &-5023192667791512651 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 @@ -497,7 +166,8 @@ AnimatorState: m_Name: Die m_Speed: 1 m_CycleOffset: 0 - m_Transitions: [] + m_Transitions: + - {fileID: 1131199853383832992} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -513,18 +183,70 @@ AnimatorState: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102361151154106906 +--- !u!1101 &-4911913766117122026 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 2 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 0} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 1 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.765625 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &-4865886577319040672 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Run to Jump + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _jump + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: -5914497066343941395} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.75 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1102 &-4600760231423422918 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: WalkRight_Shoot_AR + m_Name: Idle m_Speed: 1 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101404391616814046} + - {fileID: -632880110147916710} + - {fileID: -6673604382440492192} + - {fileID: -70235823626180740} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -534,24 +256,25 @@ AnimatorState: m_MirrorParameterActive: 0 m_CycleOffsetParameterActive: 0 m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: e6a236f5b06f9cb44ae1ca8b50fcd8ca, type: 3} + m_Motion: {fileID: 7400000, guid: 107649059ea401b4e9c5c20f21e99a55, type: 3} m_Tag: m_SpeedParameter: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102379806414260514 +--- !u!1102 &-4187437994059944167 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: WalkBack_Shoot_AR - m_Speed: 1 + m_Name: Fire + m_Speed: 10 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101018267375076786} + - {fileID: -146642472328627549} + - {fileID: -8614502741554326989} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 
0} m_IKOnFeet: 0 @@ -561,132 +284,128 @@ AnimatorState: m_MirrorParameterActive: 0 m_CycleOffsetParameterActive: 0 m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 669b29b2139d8de4ca5447dd50e201af, type: 3} + m_Motion: {fileID: 7400000, guid: 1d4365e1541bb6949a273318862b72d3, type: 3} m_Tag: m_SpeedParameter: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102525727787806748 -AnimatorState: +--- !u!1101 &-4114131529631250501 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: -5023192667791512651} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.7343743 + m_TransitionOffset: 0.08256394 + m_ExitTime: 0.28710982 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &-3812287898245291883 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Jump to Run + m_Conditions: [] + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 8354844821256608690} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.20300466 + m_TransitionOffset: 0 + m_ExitTime: 0.7916667 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &-3071590976036615157 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 381506674367370628} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0.542241 + m_ExitTime: 0.75 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1107 &-2302487397917704150 +AnimatorStateMachine: serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: WalkLeft_Shoot_AR - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101624051492446352} + m_Name: Lower + m_ChildStates: + - serializedVersion: 1 + m_State: {fileID: 8354844821256608690} + m_Position: {x: 460, y: 60, z: 0} + - serializedVersion: 1 + m_State: {fileID: -5914497066343941395} + m_Position: {x: 580, y: -120, z: 0} + - serializedVersion: 1 + m_State: {fileID: -1946935121236979824} + m_Position: {x: 490, y: 230, z: 0} + - serializedVersion: 1 + m_State: {fileID: 381506674367370628} + m_Position: {x: 980, y: 60, z: 0} + m_ChildStateMachines: [] + m_AnyStateTransitions: [] + m_EntryTransitions: [] + m_StateMachineTransitions: {} m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 34d1b8e01a0412f449dcefc77b05e27b, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- 
!u!1102 &1102593466183913558 + m_AnyStatePosition: {x: 50, y: 20, z: 0} + m_EntryPosition: {x: 50, y: 120, z: 0} + m_ExitPosition: {x: 1130, y: 240, z: 0} + m_ParentStateMachinePosition: {x: 800, y: 20, z: 0} + m_DefaultState: {fileID: 8354844821256608690} +--- !u!1102 &-1946935121236979824 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: Idle_Guard_AR + m_Name: Crouch m_Speed: 1 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101567676818271368} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 21a805ef27ad7ef42904fff42be01ba6, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102625340767904258 -AnimatorState: - serializedVersion: 6 - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: Reload - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101363450263446180} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 45b9c258f262e844e8f5316722fc04c6, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102642198119822076 -AnimatorState: - serializedVersion: 6 - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: Shoot_BurstShot_AR - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101350354845918052} - m_StateMachineBehaviours: [] - m_Position: {x: 50, y: 50, z: 0} - m_IKOnFeet: 0 - m_WriteDefaultValues: 1 - m_Mirror: 0 - m_SpeedParameterActive: 0 - m_MirrorParameterActive: 0 - m_CycleOffsetParameterActive: 0 - m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 9e6c2d8b423b3db4ca189e8497d60f1e, type: 3} - m_Tag: - m_SpeedParameter: - m_MirrorParameter: - m_CycleOffsetParameter: - m_TimeParameter: ---- !u!1102 &1102885244088947746 -AnimatorState: - serializedVersion: 6 - m_ObjectHideFlags: 1 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_Name: Idle_Ducking_AR - m_Speed: 1 - m_CycleOffset: 0 - m_Transitions: - - {fileID: 1101376048922213750} + - {fileID: 5680347307725438578} + - {fileID: -3071590976036615157} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -699,21 +418,197 @@ AnimatorState: m_Motion: {fileID: 7400000, guid: 3ad7c5979f6586d4a9532a55492a0ebe, type: 3} m_Tag: m_SpeedParameter: - m_MirrorParameter: + m_MirrorParameter: _jump m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102901032506865742 +--- !u!206 &-1862914767576164720 +BlendTree: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Blend Tree + m_Childs: + - serializedVersion: 2 + m_Motion: {fileID: 7400000, guid: 107649059ea401b4e9c5c20f21e99a55, type: 3} + m_Threshold: 0 + m_Position: {x: 0, y: 0} + m_TimeScale: 0.5 + m_CycleOffset: 0 + m_DirectBlendParameter: 
horizontal + m_Mirror: 0 + - serializedVersion: 2 + m_Motion: {fileID: 7400000, guid: 043a0882d93547c4da0104443de76efb, type: 3} + m_Threshold: 1 + m_Position: {x: 1, y: 0} + m_TimeScale: 1 + m_CycleOffset: 0 + m_DirectBlendParameter: horizontal + m_Mirror: 0 + m_BlendParameter: _vertical + m_BlendParameterY: _horizontal + m_MinThreshold: 0 + m_MaxThreshold: 1 + m_UseAutomaticThresholds: 1 + m_NormalizedBlendValues: 0 + m_BlendType: 3 +--- !u!1101 &-632880110147916710 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _jump + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 7739110899394721029} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.8125 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &-146642472328627549 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 2 + m_ConditionEvent: _fired + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: -4600760231423422918} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.8125 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &-70235823626180740 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: -5023192667791512651} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.982062 + m_TransitionOffset: 0.000000027939697 + m_ExitTime: 0.26345384 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!91 &9100000 +AnimatorController: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: CharacterAnimator + serializedVersion: 5 + m_AnimatorParameters: + - m_Name: _horizontal + m_Type: 1 + m_DefaultFloat: 0 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 9100000} + - m_Name: _vertical + m_Type: 1 + m_DefaultFloat: 1 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 9100000} + - m_Name: _jump + m_Type: 4 + m_DefaultFloat: 0 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 9100000} + - m_Name: _crouch + m_Type: 4 + m_DefaultFloat: 0 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 9100000} + - m_Name: _died + m_Type: 4 + m_DefaultFloat: 0 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 9100000} + - m_Name: _fired + m_Type: 4 + m_DefaultFloat: 0 + m_DefaultInt: 0 + m_DefaultBool: 0 + m_Controller: {fileID: 9100000} + m_AnimatorLayers: + - serializedVersion: 5 + m_Name: Lower + m_StateMachine: {fileID: -2302487397917704150} + m_Mask: {fileID: 31900000, guid: 1122aed799ca7574a8f0d2efa30e9d99, type: 2} + m_Motions: [] + m_Behaviours: [] + m_BlendingMode: 0 + 
m_SyncedLayerIndex: -1 + m_DefaultWeight: 0 + m_IKPass: 0 + m_SyncedLayerAffectsTiming: 0 + m_Controller: {fileID: 9100000} + - serializedVersion: 5 + m_Name: Upper + m_StateMachine: {fileID: -8265500127550764659} + m_Mask: {fileID: 31900000, guid: 368b178fc56a14549b588ee80c7cbf81, type: 2} + m_Motions: [] + m_Behaviours: [] + m_BlendingMode: 0 + m_SyncedLayerIndex: -1 + m_DefaultWeight: 1 + m_IKPass: 0 + m_SyncedLayerAffectsTiming: 0 + m_Controller: {fileID: 9100000} +--- !u!1102 &381506674367370628 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: Run_guard_AR + m_Name: Die m_Speed: 1 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101995184118313822} + - {fileID: -4911913766117122026} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -723,24 +618,222 @@ AnimatorState: m_MirrorParameterActive: 0 m_CycleOffsetParameterActive: 0 m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 043a0882d93547c4da0104443de76efb, type: 3} + m_Motion: {fileID: 7400000, guid: d406f8f3cbe268f4e9d0234d45cca60c, type: 3} m_Tag: m_SpeedParameter: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102942199553133880 +--- !u!1101 &1131199853383832992 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 2 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 0} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 1 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.765625 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &1426403871767708545 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 2 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 0} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 1 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.765625 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &1766918516916494365 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _crouch + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: -1946935121236979824} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.25 + m_TransitionOffset: 0 + m_ExitTime: 0.75 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &2942481661802285141 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 381506674367370628} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 
0.45466095 + m_TransitionOffset: 0 + m_ExitTime: 0.5480226 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &4669644873837644826 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 381506674367370628} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 1.0268358 + m_TransitionOffset: 0.026483208 + m_ExitTime: 0.22033934 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &5168308916736617153 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: Jump to Run + m_Conditions: [] + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 8354844821256608690} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.20300466 + m_TransitionOffset: 0 + m_ExitTime: 0.7916667 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &5680347307725438578 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 2 + m_ConditionEvent: _crouch + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 8354844821256608690} + m_Solo: 1 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.09322041 + m_TransitionOffset: 0 + m_ExitTime: 0.75 + m_HasExitTime: 0 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1101 &7161422939700495704 +AnimatorStateTransition: + m_ObjectHideFlags: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 381506674367370628} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.45466095 + m_TransitionOffset: 0 + m_ExitTime: 0.5480226 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 +--- !u!1102 &7739110899394721029 AnimatorState: serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: Run_gunMiddle_AR + m_Name: Jump m_Speed: 1 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101213140064716440} + - {fileID: -5341886129914063569} + - {fileID: -4114131529631250501} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -750,24 +843,26 @@ AnimatorState: m_MirrorParameterActive: 0 m_CycleOffsetParameterActive: 0 m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: d805ca1ced9a1054c9cb7e1dc431a6e3, type: 3} + m_Motion: {fileID: 7400000, guid: c09541f4236345c4fa4e4745793a59f3, type: 3} m_Tag: m_SpeedParameter: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1102 &1102949299928912516 +--- !u!1102 &8354844821256608690 AnimatorState: 
serializedVersion: 6 m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: Shoot_SingleShot_AR + m_Name: Locomotion m_Speed: 1 m_CycleOffset: 0 m_Transitions: - - {fileID: 1101616875765159898} + - {fileID: -4865886577319040672} + - {fileID: 1766918516916494365} + - {fileID: 4669644873837644826} m_StateMachineBehaviours: [] m_Position: {x: 50, y: 50, z: 0} m_IKOnFeet: 0 @@ -777,76 +872,34 @@ AnimatorState: m_MirrorParameterActive: 0 m_CycleOffsetParameterActive: 0 m_TimeParameterActive: 0 - m_Motion: {fileID: 7400000, guid: 7e58f38d10fcc9a498d348e7339ce3ec, type: 3} + m_Motion: {fileID: -1862914767576164720} m_Tag: m_SpeedParameter: m_MirrorParameter: m_CycleOffsetParameter: m_TimeParameter: ---- !u!1107 &1107476691846109156 -AnimatorStateMachine: - serializedVersion: 6 +--- !u!1101 &9141976730198879995 +AnimatorStateTransition: m_ObjectHideFlags: 1 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: Base Layer - m_ChildStates: - - serializedVersion: 1 - m_State: {fileID: 1102593466183913558} - m_Position: {x: 300, y: 108, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102238716766734490} - m_Position: {x: 300, y: 168, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102100670134039114} - m_Position: {x: 300, y: 228, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102277268960662982} - m_Position: {x: 280, y: 440, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102885244088947746} - m_Position: {x: 300, y: 288, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102088155696507906} - m_Position: {x: 516, y: 444, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102625340767904258} - m_Position: {x: 528, y: 300, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102901032506865742} - m_Position: {x: 768, y: 444, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102949299928912516} - m_Position: {x: 528, y: 108, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102642198119822076} - m_Position: {x: 528, y: 168, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102106218193099826} - m_Position: {x: 528, y: 228, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102243588419055754} - m_Position: {x: 768, y: 108, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102525727787806748} - m_Position: {x: 768, y: 168, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102361151154106906} - m_Position: {x: 768, y: 228, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102379806414260514} - m_Position: {x: 768, y: 300, z: 0} - - serializedVersion: 1 - m_State: {fileID: 1102942199553133880} - m_Position: {x: 768, y: 372, z: 0} - m_ChildStateMachines: [] - m_AnyStateTransitions: [] - m_EntryTransitions: [] - m_StateMachineTransitions: {} - m_StateMachineBehaviours: [] - m_AnyStatePosition: {x: 50, y: 20, z: 0} - m_EntryPosition: {x: 50, y: 120, z: 0} - m_ExitPosition: {x: 48, y: 72, z: 0} - m_ParentStateMachinePosition: {x: 800, y: 20, z: 0} - m_DefaultState: {fileID: 1102593466183913558} + m_Name: + m_Conditions: + - m_ConditionMode: 1 + m_ConditionEvent: _died + m_EventTreshold: 0 + m_DstStateMachine: {fileID: 0} + m_DstState: {fileID: 381506674367370628} + m_Solo: 0 + m_Mute: 0 + m_IsExit: 0 + serializedVersion: 3 + m_TransitionDuration: 0.45466095 + m_TransitionOffset: 0 + m_ExitTime: 0.5480226 + m_HasExitTime: 1 + m_HasFixedDuration: 1 + m_InterruptionSource: 0 + m_OrderedInterruption: 1 + m_CanTransitionToSelf: 1 diff --git 
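The CharacterAnimator.controller diff above wires two float parameters (_horizontal, _vertical) into the Locomotion blend tree and four bools (_jump, _crouch, _died, _fired) into the layer transitions, split across the masked Lower and Upper layers. CharacterLocomotion.cs, added in the next hunk, requires and drives an AnimatorHandler component (Initialize(), canRotate, UpdateAnimatorValues(...)) that appears nowhere in this patch series. Below is a minimal sketch of what that handler might look like, inferred purely from those call sites and from the parameter names in the controller; the class body, parameter order, and damping values are assumptions, not the author's code:

// Hypothetical sketch -- AnimatorHandler is referenced by this patch but not included in it.
// Assumes it simply forwards gameplay values into the CharacterAnimator parameters above.
using UnityEngine;

namespace Animators.Leonid_Animator
{
    public class AnimatorHandler : MonoBehaviour
    {
        public bool canRotate;            // read by CharacterLocomotion before HandleRotation
        private Animator _animator;

        public void Initialize()
        {
            _animator = GetComponent<Animator>();
            canRotate = true;
        }

        // Signature inferred from the call in CharacterLocomotion.Update:
        // UpdateAnimatorValues(moveAmount, 0, jumpPressed, crouchPressed, firePressed).
        public void UpdateAnimatorValues(float vertical, float horizontal,
                                         bool jump, bool crouch, bool fire)
        {
            // _vertical/_horizontal are the float parameters feeding the Locomotion blend tree.
            _animator.SetFloat("_vertical", vertical, 0.1f, Time.deltaTime);
            _animator.SetFloat("_horizontal", horizontal, 0.1f, Time.deltaTime);

            // Bool parameters gating the Jump/Crouch/Fire transitions in both layers.
            _animator.SetBool("_jump", jump);
            _animator.SetBool("_crouch", crouch);
            _animator.SetBool("_fired", fire);
        }
    }
}

Note that _died is left untouched in this sketch; nothing in the patch shows which script sets it.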
a/Assets/Defaults/mat_proBuilder.mat.meta b/Assets/Scripts/Animators/Leonid Animator/CharacterAnimator.controller.meta
old mode 100755
new mode 100644
similarity index 64%
rename from Assets/Defaults/mat_proBuilder.mat.meta
rename to Assets/Scripts/Animators/Leonid Animator/CharacterAnimator.controller.meta
index 957f067..63abf20
--- a/Assets/Defaults/mat_proBuilder.mat.meta
+++ b/Assets/Scripts/Animators/Leonid Animator/CharacterAnimator.controller.meta
@@ -1,8 +1,8 @@
 fileFormatVersion: 2
-guid: 536ae120520e6f245bad32b94a03f6a2
+guid: 3ebf60422b6cb1c498ee4cf238072b43
 NativeFormatImporter:
   externalObjects: {}
-  mainObjectFileID: 2100000
+  mainObjectFileID: 9100000
   userData:
   assetBundleName:
   assetBundleVariant:
diff --git a/Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs b/Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs
new file mode 100644
index 0000000..af2c968
--- /dev/null
+++ b/Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs
@@ -0,0 +1,105 @@
+using UnityEngine;
+
+namespace Animators.Leonid_Animator
+{
+    [RequireComponent(
+        typeof(Rigidbody),
+        typeof(InputHandler),
+        typeof(AnimatorHandler))]
+    public class CharacterLocomotion : MonoBehaviour
+    {
+        private Transform _cameraObject;
+        private InputHandler _inputHandler;
+        private Vector3 _moveDirection;
+
+        [HideInInspector] public Transform myTransform;
+        [HideInInspector] public AnimatorHandler myAnimatorHandler;
+
+        public Rigidbody myRigidbody;
+        [SerializeField] public float jumpForce;
+        public GameObject normalCamera;
+
+        [Header("Stats")]
+        [SerializeField] private float movementSpeed = 5;
+        [SerializeField] private float rotationSpeed = 10;
+
+        private void Start()
+        {
+            myRigidbody = GetComponent<Rigidbody>();
+            _inputHandler = GetComponent<InputHandler>();
+            myAnimatorHandler = GetComponent<AnimatorHandler>();
+
+            _cameraObject = Camera.main.transform;
+            myTransform = transform;
+            myAnimatorHandler.Initialize();
+        }
+
+        private void Update()
+        {
+            var deltaTime = Time.deltaTime;
+            _inputHandler.TickInput(deltaTime);
+            _moveDirection = _cameraObject.forward * _inputHandler.vertical +
+                             _cameraObject.right * _inputHandler.horizontal;
+            _moveDirection.Normalize();
+            _moveDirection *= movementSpeed;
+            _moveDirection.y = 0;
+
+            var projectedVelocity = Vector3.ProjectOnPlane(_moveDirection, _normalVector);
+            myRigidbody.velocity = projectedVelocity;
+
+            if (myAnimatorHandler.canRotate)
+            {
+                HandleRotation(deltaTime);
+            }
+
+            myAnimatorHandler.UpdateAnimatorValues(
+                _inputHandler.moveAmount,
+                0,
+                _inputHandler.jumpPressed,
+                _inputHandler.crouchPressed,
+                _inputHandler.firePressed);
+
+            var velocity = myRigidbody.velocity;
+            myRigidbody.AddForce(_inputHandler.jumpPressed ?
+                new Vector3(0, jumpForce, 0)
+                : new Vector3(velocity.x*100, -50, velocity.z * 100));
+        }
+
+        private void LateUpdate()
+        {
+            _inputHandler.jumpPressed = false;
+            _inputHandler.crouchPressed = false;
+        }
+
+        #region Movement
+
+        private Vector3 _normalVector;
+        private Vector3 _targetPosition;
+
+        private void HandleRotation(float delta)
+        {
+            if (Mathf.Abs(_inputHandler.horizontal) + Mathf.Abs(_inputHandler.vertical) < 0.1)
+            {
+                print("stop");
+                return;
+            }
+
+            print("begin");
+            var moveAmount = _inputHandler.moveAmount;
+            var targetDir = _cameraObject.forward * _inputHandler.vertical +
+                            _cameraObject.right * _inputHandler.horizontal;
+            targetDir.Normalize();
+            targetDir.y = 0;
+            if (targetDir == Vector3.zero)
+                targetDir = myTransform.forward;
+
+            var rotSpeed = rotationSpeed;
+
+            var rotation = Quaternion.LookRotation(targetDir);
+            var targetRotation = Quaternion.Slerp(myTransform.rotation, rotation, rotationSpeed * delta);
+            myTransform.rotation = targetRotation;
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file
diff --git a/Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs.meta b/Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs.meta
new file mode 100644
index 0000000..715f6df
--- /dev/null
+++ b/Assets/Scripts/Animators/Leonid Animator/CharacterLocomotion.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: aeafb7b8074141969e8779cd3d4a9d08
+timeCreated: 1652026088
\ No newline at end of file
diff --git a/Assets/Scripts/Animators/Leonid Animator/InputHandler.cs b/Assets/Scripts/Animators/Leonid Animator/InputHandler.cs
new file mode 100644
index 0000000..3e971e7
--- /dev/null
+++ b/Assets/Scripts/Animators/Leonid Animator/InputHandler.cs
@@ -0,0 +1,77 @@
+using CameraScripts;
+using UnityEngine;
+
+namespace Animators.Leonid_Animator
+{
+    public class InputHandler : MonoBehaviour
+    {
+        public float horizontal;
+        public float vertical;
+        public float moveAmount;
+        public float mouseX;
+        public float mouseY;
+
+        public bool crouchPressed;
+        public bool jumpPressed;
+        public bool firePressed;
+
+        private ThirdPersonViewInput _inputActions;
+        private Vector2 _movementInput;
+        private Vector2 _cameraInput;
+
+        private CameraHandler _cameraHandler;
+
+        private void Awake()
+        {
+            _cameraHandler = CameraHandler.Singleton;
+            if (_cameraHandler == null)
+                Debug.LogError("Camera Handler not found");
+        }
+
+        private void Update()
+        {
+            _cameraHandler.TargetPosition(Time.deltaTime);
+            _cameraHandler.HandleCameraRotation(Time.deltaTime, mouseX, mouseY);
+        }
+
+        private void OnEnable()
+        {
+            if (_inputActions is null)
+            {
+                _inputActions = new ThirdPersonViewInput();
+                _inputActions.PlayerMovement.Movement.performed +=
+                    context => _movementInput = context.ReadValue<Vector2>();
+                _inputActions.PlayerMovement.Camera.performed +=
+                    context => _cameraInput = context.ReadValue<Vector2>();
+                _inputActions.PlayerActions.Crouch.performed +=
+                    context => crouchPressed = true;
+                _inputActions.PlayerActions.Jump.performed +=
+                    context => jumpPressed = true;
+                _inputActions.PlayerActions.Fire.performed +=
+                    context => firePressed = true;
+                _inputActions.PlayerActions.Fire.canceled +=
+                    context => firePressed = false;
+            }
+            _inputActions.Enable();
+        }
+
+        private void OnDisable()
+        {
+            _inputActions.Disable();
+        }
+
+        public void TickInput(float delta)
+        {
+            MoveInput(delta);
+        }
+
+        private void MoveInput(float delta)
+        {
+            horizontal = _movementInput.x;
+            vertical = _movementInput.y;
+            moveAmount = Mathf.Clamp01(Mathf.Abs(horizontal) + Mathf.Abs(vertical));
+            mouseX = _cameraInput.x;
mouseY = _cameraInput.y; + } + } +} \ No newline at end of file diff --git a/Assets/Scripts/Animators/Leonid Animator/InputHandler.cs.meta b/Assets/Scripts/Animators/Leonid Animator/InputHandler.cs.meta new file mode 100644 index 0000000..c4d1f8e --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/InputHandler.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 77c38ddfaba349c590d4a6583f7efac4 +timeCreated: 1652025145 \ No newline at end of file diff --git a/Assets/Scripts/Animators/Leonid Animator/LowerBody.mask b/Assets/Scripts/Animators/Leonid Animator/LowerBody.mask new file mode 100644 index 0000000..ebcf1fa --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/LowerBody.mask @@ -0,0 +1,139 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!319 &31900000 +AvatarMask: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: LowerBody + m_Mask: 01000000010000000100000001000000010000000100000001000000010000000100000001000000010000000100000001000000 + m_Elements: + - m_Path: + m_Weight: 1 + - m_Path: Arm1 + m_Weight: 1 + - m_Path: AssaultRifle + m_Weight: 1 + - m_Path: Backpack1 + m_Weight: 1 + - m_Path: Body1 + m_Weight: 1 + - m_Path: head1 + m_Weight: 1 + - m_Path: Hips + m_Weight: 1 + - m_Path: Hips/ArmPosition_Left + m_Weight: 1 + - m_Path: Hips/ArmPosition_Right + m_Weight: 1 + - m_Path: Hips/ArmPosition_Right/magazine_Right + m_Weight: 1 + - m_Path: Hips/ArmPosition_Right/Trigger_Right + m_Weight: 1 + - m_Path: Hips/Spine + m_Weight: 1 + - m_Path: Hips/Spine/Chest + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack/ArmPlacement_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack/ArmPlacement_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack/ArmPlacement_Upper + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck/Head + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck/Head/Headgear_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck/Head/Headgear_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/ShoulderPadCTRL_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/ShoulderPadCTRL_Left/ShoulderPadBlade_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/ShoulderPadCTRL_Left/ShoulderPadBody_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Index_Proximal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Index_Proximal_Left/Index_Intermediate_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Index_Proximal_Left/Index_Intermediate_Left/Index_Distal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/RestOfFingers_Proximal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/RestOfFingers_Proximal_Left/RestOfFingers_Intermediate_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/RestOfFingers_Proximal_Left/RestOfFingers_Intermediate_Left/RestOfFingers_Distal_Left + 
m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Thumb_Proximal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Thumb_Proximal_Left/Thumb_Intermediate_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Thumb_Proximal_Left/Thumb_Intermediate_Left/Thumb_Distal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/ShoulderPadCTRL_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/ShoulderPadCTRL_Right/ShoulderPadBlade_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/ShoulderPadCTRL_Right/ShoulderPadBody_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Index_Proximal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Index_Proximal_Right/Index_Intermediate_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Index_Proximal_Right/Index_Intermediate_Right/Index_Distal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/RestOfFingers_Proximal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/RestOfFingers_Proximal_Right/RestOfFingers_Intermediate_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/RestOfFingers_Proximal_Right/RestOfFingers_Intermediate_Right/RestOfFingers_Distal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Thumb_Proximal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Thumb_Proximal_Right/Thumb_Intermediate_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Thumb_Proximal_Right/Thumb_Intermediate_Right/Thumb_Distal_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left/Foot_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left/Foot_Left/Toe_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left/Foot_Left/Toe_Left/Toetip_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right/Foot_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right/Foot_Right/Toe_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right/Foot_Right/Toe_Right/Toetip_Right + m_Weight: 1 + - m_Path: Leg1 + m_Weight: 1 diff --git a/Assets/Scripts/Animators/Leonid Animator/LowerBody.mask.meta b/Assets/Scripts/Animators/Leonid Animator/LowerBody.mask.meta new file mode 100644 index 0000000..ef5e183 --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/LowerBody.mask.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 1122aed799ca7574a8f0d2efa30e9d99 +NativeFormatImporter: + externalObjects: {} + mainObjectFileID: 31900000 + userData: + assetBundleName: + 
assetBundleVariant: diff --git a/Assets/Scripts/Animators/Leonid Animator/UpperBody.mask b/Assets/Scripts/Animators/Leonid Animator/UpperBody.mask new file mode 100644 index 0000000..0384688 --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/UpperBody.mask @@ -0,0 +1,139 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!319 &31900000 +AvatarMask: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: UpperBody + m_Mask: 00000000010000000100000000000000000000000100000001000000010000000100000000000000000000000000000000000000 + m_Elements: + - m_Path: + m_Weight: 1 + - m_Path: Arm1 + m_Weight: 1 + - m_Path: AssaultRifle + m_Weight: 1 + - m_Path: Backpack1 + m_Weight: 1 + - m_Path: Body1 + m_Weight: 1 + - m_Path: head1 + m_Weight: 1 + - m_Path: Hips + m_Weight: 1 + - m_Path: Hips/ArmPosition_Left + m_Weight: 1 + - m_Path: Hips/ArmPosition_Right + m_Weight: 1 + - m_Path: Hips/ArmPosition_Right/magazine_Right + m_Weight: 1 + - m_Path: Hips/ArmPosition_Right/Trigger_Right + m_Weight: 1 + - m_Path: Hips/Spine + m_Weight: 1 + - m_Path: Hips/Spine/Chest + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack/ArmPlacement_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack/ArmPlacement_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/BackPack/ArmPlacement_Upper + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck/Head + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck/Head/Headgear_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Neck/Head/Headgear_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/ShoulderPadCTRL_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/ShoulderPadCTRL_Left/ShoulderPadBlade_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/ShoulderPadCTRL_Left/ShoulderPadBody_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Index_Proximal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Index_Proximal_Left/Index_Intermediate_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Index_Proximal_Left/Index_Intermediate_Left/Index_Distal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/RestOfFingers_Proximal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/RestOfFingers_Proximal_Left/RestOfFingers_Intermediate_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/RestOfFingers_Proximal_Left/RestOfFingers_Intermediate_Left/RestOfFingers_Distal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Thumb_Proximal_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Thumb_Proximal_Left/Thumb_Intermediate_Left + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Left/UpperArm_Left/LowerArm_Left/Hand_Left/Thumb_Proximal_Left/Thumb_Intermediate_Left/Thumb_Distal_Left + m_Weight: 1 + - m_Path: 
Hips/Spine/Chest/Shoulder_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/ShoulderPadCTRL_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/ShoulderPadCTRL_Right/ShoulderPadBlade_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/ShoulderPadCTRL_Right/ShoulderPadBody_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Index_Proximal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Index_Proximal_Right/Index_Intermediate_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Index_Proximal_Right/Index_Intermediate_Right/Index_Distal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/RestOfFingers_Proximal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/RestOfFingers_Proximal_Right/RestOfFingers_Intermediate_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/RestOfFingers_Proximal_Right/RestOfFingers_Intermediate_Right/RestOfFingers_Distal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Thumb_Proximal_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Thumb_Proximal_Right/Thumb_Intermediate_Right + m_Weight: 1 + - m_Path: Hips/Spine/Chest/Shoulder_Right/UpperArm_Right/LowerArm_Right/Hand_Right/Thumb_Proximal_Right/Thumb_Intermediate_Right/Thumb_Distal_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left/Foot_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left/Foot_Left/Toe_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Left/LowerLeg_Left/Foot_Left/Toe_Left/Toetip_Left + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right/Foot_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right/Foot_Right/Toe_Right + m_Weight: 1 + - m_Path: Hips/UpperLeg_Right/LowerLeg_Right/Foot_Right/Toe_Right/Toetip_Right + m_Weight: 1 + - m_Path: Leg1 + m_Weight: 1 diff --git a/Assets/Scripts/Animators/Leonid Animator/UpperBody.mask.meta b/Assets/Scripts/Animators/Leonid Animator/UpperBody.mask.meta new file mode 100644 index 0000000..42f9f75 --- /dev/null +++ b/Assets/Scripts/Animators/Leonid Animator/UpperBody.mask.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 368b178fc56a14549b588ee80c7cbf81 +NativeFormatImporter: + externalObjects: {} + mainObjectFileID: 31900000 + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Scripts/CameraScripts.meta b/Assets/Scripts/CameraScripts.meta new file mode 100644 index 0000000..28c64a8 --- /dev/null +++ b/Assets/Scripts/CameraScripts.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 563fa8c0f982459e8a6357c9f9078744 +timeCreated: 1652086279 \ No newline at end of file diff --git a/Assets/Scripts/CameraScripts/CameraHandler.cs b/Assets/Scripts/CameraScripts/CameraHandler.cs new file mode 
100644 index 0000000..d9d4672 --- /dev/null +++ b/Assets/Scripts/CameraScripts/CameraHandler.cs @@ -0,0 +1,61 @@ +using System; +using Unity.Mathematics; +using UnityEngine; + +namespace CameraScripts +{ + public class CameraHandler : MonoBehaviour + { + public Transform targetTransform; + public Transform cameraTransform; + public Transform cameraPivotTransform; + private Transform _myTransform; + private Vector3 _cameraTransformPosition; + private LayerMask ignoreLayers = ~(1 << 8 | 1 << 9 | 1 << 10); + + public static CameraHandler Singleton; + + public const float LookSpeed = 0.1f; + public const float FollowSpeed = 0.1f; + public const float PivotSpeed = 0.03f; + + private float _defaultPosition; + private float _lookAngle; + private float _pivotAngle; + + public float minimumPivot = -35; + public float maximumPivot = 35; + + + private void Awake() + { + Application.targetFrameRate = 60; + Singleton = this; + _myTransform = transform; + _defaultPosition = _myTransform.localPosition.z; + } + + public void TargetPosition(float delta) + { + var toTargetPosition = Vector3.Lerp(_myTransform.position, targetTransform.position, delta /FollowSpeed); + _myTransform.position = toTargetPosition; + } + + public void HandleCameraRotation(float delta, float mouseX, float mouseY) + { + _lookAngle += (mouseX * LookSpeed) / delta; + _pivotAngle -= (mouseY * PivotSpeed) / delta; + _pivotAngle = Mathf.Clamp(_pivotAngle, minimumPivot, maximumPivot); + + var rotation = Vector3.zero; + rotation.y = _lookAngle; + var targetRotation = Quaternion.Euler(rotation); + _myTransform.rotation = targetRotation; + + rotation = Vector3.zero; + rotation.x = _pivotAngle; + targetRotation = Quaternion.Euler(rotation); + cameraPivotTransform.localRotation = targetRotation; + } + } +} \ No newline at end of file diff --git a/Assets/Scripts/CameraScripts/CameraHandler.cs.meta b/Assets/Scripts/CameraScripts/CameraHandler.cs.meta new file mode 100644 index 0000000..b682885 --- /dev/null +++ b/Assets/Scripts/CameraScripts/CameraHandler.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 3d606407023147d7b4d530a9593e9697 +timeCreated: 1652086288 \ No newline at end of file diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs index 7af2ae9..9c62f45 100644 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Threading.Tasks; using Unity.MLAgents; using Unity.MLAgents.Actuators; using Unity.MLAgents.Sensors; diff --git a/Assets/Scripts/Character/scr_Models.cs b/Assets/Scripts/Character/scr_Models.cs deleted file mode 100755 index c735db5..0000000 --- a/Assets/Scripts/Character/scr_Models.cs +++ /dev/null @@ -1,75 +0,0 @@ -using System; -using UnityEngine; - -public static class scr_Models -{ - #region Player - - public enum PlayerStance - { - Stand, - Crouch, - Prone - } - - [Serializable] - public class PlayerSettingsModel - { - [Header("View Settings")] - public float ViewXSensetivity; - public float ViewYSensetivity; - - public bool ViewXInverted; - public bool ViewYInverted; - - [Header("Movement Settings")] - public bool SprintingHold; - public float MovementSmoothing; - - [Header("Movement - Running")] - public float RunningForwardSpeed; - public float RunningStrafeSpeed; - - [Header("Movement - Walking")] - public float WalkingForwardSpeed; - public float WalkingBackwardSpeed; - public float WalkingStrafeSpeed; - - [Header("Jumping")] - public float JumpingHeight; - public float JumpingFalloff; - 
public float FallingSmoothing; - - [Header("Speed Effectors")] - public float SpeedEffector = 1; - public float CrouchSpeedEffector; - public float ProneSpeedEffector; - public float FallingSpeedEffector; - } - - [Serializable] - public class CharacterStance - { - public float CameraHeight; - public CapsuleCollider StanceCollider; - } - - #endregion - - #region - Weapons - - - [Serializable] - public class WeaponSettingsModel - { - [Header("Sway")] - public float SwayAmount; - public bool SwayYInverted; - public bool SwayXInverted; - public float SwaySmoothing; - public float SwayResetSmoothing; - public float SwayClampX; - public float SwayClampY; - } - - #endregion -} diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs index a70fd61..bddfadb 100755 --- a/Assets/Scripts/Misc/NavPoint.cs +++ b/Assets/Scripts/Misc/NavPoint.cs @@ -14,12 +14,11 @@ public class NavPoint : MonoBehaviour public Vector3 Position => gameObject.transform.position; public float FlagDistance { get; private set; } - public NavPointType navType = NavPointType.Direction; + [SerializeField] public NavPointType navType; - [HideInInspector] - public int PointId = 0; - public float DeathAttr = 0; - public List EnemiesSeen = new List(); + [HideInInspector] public int PointId = 0; + [HideInInspector] public float DeathAttr = 0; + [HideInInspector] public List EnemiesSeen = new List(); private void Start() { diff --git a/Assets/Scripts/Weapons/scr_WeaponController.cs b/Assets/Scripts/Weapons/WeaponController.cs old mode 100755 new mode 100644 similarity index 96% rename from Assets/Scripts/Weapons/scr_WeaponController.cs rename to Assets/Scripts/Weapons/WeaponController.cs index 613fc61..d79577a --- a/Assets/Scripts/Weapons/scr_WeaponController.cs +++ b/Assets/Scripts/Weapons/WeaponController.cs @@ -1,47 +1,46 @@ -using UnityEngine; -using static scr_Models; -public class scr_WeaponController : MonoBehaviour -{ - private scr_CharacterController characterController; - [Header("Settings")] - public WeaponSettingsModel settings; - - private bool isInitialised; - - Vector3 newWeaponRotation; - Vector3 newWeaponRotationVelocity; - - Vector3 targetWeaponRotation; - Vector3 targetWeaponRotationVelocity; - - private void Start() - { - newWeaponRotation = transform.localRotation.eulerAngles; - } - - public void Initialise(scr_CharacterController CharacterController) - { - characterController = CharacterController; - isInitialised = true; - } - - public void Update() - { - if (!isInitialised) - { - return; - } - - targetWeaponRotation.y += settings.SwayAmount * (settings.SwayXInverted ? -characterController.input_View.x : characterController.input_View.x) * Time.deltaTime; - targetWeaponRotation.x += settings.SwayAmount * (settings.SwayYInverted ? 
characterController.input_View.y : -characterController.input_View.y) * Time.deltaTime; - //newWeaponRotation.x = Mathf.Clamp(newWeaponRotation.x, ViewClampYMin, ViewClampYMax); - - targetWeaponRotation.x = Mathf.Clamp(targetWeaponRotation.x, -settings.SwayClampX, settings.SwayClampX); - targetWeaponRotation.y = Mathf.Clamp(targetWeaponRotation.y, -settings.SwayClampY, settings.SwayClampY); - - targetWeaponRotation = Vector3.SmoothDamp(targetWeaponRotation, Vector3.zero, ref targetWeaponRotationVelocity, settings.SwayResetSmoothing); - newWeaponRotation = Vector3.SmoothDamp(newWeaponRotation, targetWeaponRotation, ref newWeaponRotationVelocity, settings.SwaySmoothing); - - transform.localRotation = Quaternion.Euler(newWeaponRotation); - } -} +using UnityEngine; +public class scr_WeaponController : MonoBehaviour +{ + private scr_CharacterController characterController; + [Header("Settings")] + public WeaponSettingsModel settings; + + private bool isInitialised; + + Vector3 newWeaponRotation; + Vector3 newWeaponRotationVelocity; + + Vector3 targetWeaponRotation; + Vector3 targetWeaponRotationVelocity; + + private void Start() + { + newWeaponRotation = transform.localRotation.eulerAngles; + } + + public void Initialise(scr_CharacterController CharacterController) + { + characterController = CharacterController; + isInitialised = true; + } + + public void Update() + { + if (!isInitialised) + { + return; + } + + targetWeaponRotation.y += settings.SwayAmount * (settings.SwayXInverted ? -characterController.input_View.x : characterController.input_View.x) * Time.deltaTime; + targetWeaponRotation.x += settings.SwayAmount * (settings.SwayYInverted ? characterController.input_View.y : -characterController.input_View.y) * Time.deltaTime; + //newWeaponRotation.x = Mathf.Clamp(newWeaponRotation.x, ViewClampYMin, ViewClampYMax); + + targetWeaponRotation.x = Mathf.Clamp(targetWeaponRotation.x, -settings.SwayClampX, settings.SwayClampX); + targetWeaponRotation.y = Mathf.Clamp(targetWeaponRotation.y, -settings.SwayClampY, settings.SwayClampY); + + targetWeaponRotation = Vector3.SmoothDamp(targetWeaponRotation, Vector3.zero, ref targetWeaponRotationVelocity, settings.SwayResetSmoothing); + newWeaponRotation = Vector3.SmoothDamp(newWeaponRotation, targetWeaponRotation, ref newWeaponRotationVelocity, settings.SwaySmoothing); + + transform.localRotation = Quaternion.Euler(newWeaponRotation); + } +} diff --git a/Assets/Scripts/Weapons/scr_WeaponController.cs.meta b/Assets/Scripts/Weapons/WeaponController.cs.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scripts/Weapons/scr_WeaponController.cs.meta rename to Assets/Scripts/Weapons/WeaponController.cs.meta diff --git a/ProjectSettings/EditorBuildSettings.asset b/ProjectSettings/EditorBuildSettings.asset index 0147887..0c40c51 100755 --- a/ProjectSettings/EditorBuildSettings.asset +++ b/ProjectSettings/EditorBuildSettings.asset @@ -5,4 +5,6 @@ EditorBuildSettings: m_ObjectHideFlags: 0 serializedVersion: 2 m_Scenes: [] - m_configObjects: {} + m_configObjects: + com.unity.input.settings: {fileID: 11400000, guid: 4480bcbc35319b14588f3c0eb33e88c8, + type: 2} diff --git a/ProjectSettings/ProjectVersion.txt b/ProjectSettings/ProjectVersion.txt new file mode 100644 index 0000000..89a11d7 --- /dev/null +++ b/ProjectSettings/ProjectVersion.txt @@ -0,0 +1,2 @@ +m_EditorVersion: 2019.4.35f1 +m_EditorVersionWithRevision: 2019.4.35f1 (0462406dff2e) diff --git a/ProjectSettings/TagManager.asset b/ProjectSettings/TagManager.asset index 
2730803..3a479c3 100755 --- a/ProjectSettings/TagManager.asset +++ b/ProjectSettings/TagManager.asset @@ -20,7 +20,7 @@ TagManager: - - Player - - - + - Controller - - - diff --git a/test-ml-agents.ipynb b/test-ml-agents.ipynb new file mode 100644 index 0000000..1e2d31e --- /dev/null +++ b/test-ml-agents.ipynb @@ -0,0 +1,20623 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 34, + "id": "8104e2db-b1a8-40a0-a238-5d9149fd74b0", + "metadata": {}, + "outputs": [], + "source": [ + "from mlagents_envs.environment import UnityEnvironment\n", + "import mlagents_envs\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "id": "6f477382-acc9-4aec-907a-7f58caf955ed", + "metadata": {}, + "outputs": [], + "source": [ + "import random" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "id": "b7f60f26-0a90-4ea5-b2c2-b5683bda56a6", + "metadata": {}, + "outputs": [], + "source": [ + "env = UnityEnvironment()" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "id": "5929b410-12c3-4bd9-b984-b2c29a76c3f3", + "metadata": {}, + "outputs": [], + "source": [ + "env.reset()" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "id": "f108ff09-9f42-4405-add3-6df941c48f8b", + "metadata": { + "scrolled": true, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 78.21462], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 78.21462], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
+      "... [output truncated for readability: hundreds of near-identical Step / DecisionStep printouts follow, all with reward=0.0; only a few cells of the 10x5 observation grid toggle between 0 and 1, and the last value of the vector observation steps from 78.21462 to 80.694435 to 91.30669 over the run. The bare '(, )' lines are the printed (DecisionSteps, TerminalSteps) tuple, whose object reprs were lost in export.] ...\n",
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
+        "[output truncated: dozens of near-identical iterations follow. Each iteration prints \"Step\", the (DecisionSteps, TerminalSteps) pair returned by env.get_steps (the object reprs were lost in conversion and appear as \"(, )\"), then a DecisionStep(obs=[a 10x5 float32 grid of 0/1 values, a 14-element float32 vector whose entries vary slightly and whose last element hovers around 90.9-91.3], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0), and finally the reward 0.0.]\n"
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
+    "Step\n",
+    "(<DecisionSteps object>, <TerminalSteps object>)\n",
+    "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n",
+    "       [0., 1., 0., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       ...\n",
+    "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n",
+    "        2.     ,  1.     ,  2.     ,  1.     ,  0.     ,  0.     ,\n",
+    "        0.     , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+    "0.0\n",
[... this "Step" block repeats near-verbatim for several hundred environment steps; between steps only a few entries of the 10x5 observation grid, two small counters in the 14-element state vector, and its final element (91.30669, later 83.26209) change, while reward stays 0.0 and the action mask stays all-False. The bare "(, )" lines in the raw capture are the stripped object reprs of the (DecisionSteps, TerminalSteps) tuple returned by env.get_steps(). ...]
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "0.0\n",
[output condensed: the cell repeats this "Step" record roughly a hundred more times. Each record is a "Step" line, a "(, )" line (apparently the (DecisionSteps, TerminalSteps) tuple printed with its object reprs lost in capture), a DecisionStep(...) carrying a 10x5 grid observation and a 14-element vector observation, and the printed reward. Successive records differ only slightly: the first three grid rows flip between [0., 1., 1., 0., 0.] and [0., 1., 0., 0., 0.] while the other seven rows stay all zero, two mid-vector entries cycle through 0, 1 and 2, and the last vector element drifts from 91.30669 to 91.25988 and then 91.26341. The reward stays 0.0 and every action-mask branch stays array([False]) throughout.]
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 85.95022], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 85.95022], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
+    "Step\n",
+    "(, )\n",
+    "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n",
+    "       [0., 1., 0., 0., 0.],\n",
+    "       [0., 1., 1., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.],\n",
+    "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n",
+    "        1.     ,  1.     ,  2.     ,  1.     ,  0.     ,  0.     ,\n",
+    "        0.     , 85.95022], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+    "0.0\n",

[Notebook output truncated: this four-part block — "Step", the "(, )" line, a DecisionStep dump, and the reward "0.0" — repeats for dozens of further environment steps. The repetitions are identical except that the first two rows of the 10x5 observation matrix toggle between [0., 1., 0., 0., 0.] and [0., 1., 1., 0., 0.], three mid-vector entries cycle through the values 0-2, and the final scalar advances 85.95022 -> 91.14938 -> 91.25333. The bare "(, )" is the printed (DecisionSteps, TerminalSteps) tuple; the angle-bracketed object reprs between the parentheses were stripped during text extraction.]
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
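For orientation (an editorial note, not part of the patch): records of this shape are what the mlagents_envs Python API prints when a notebook polls a Unity environment in a loop. Below is a minimal sketch of such a loop; file_name=None (attach to a running Editor), the 300-step bound, the single-behavior assumption, and the exact print layout are illustrative assumptions, not taken from the patch.

from mlagents_envs.environment import UnityEnvironment

# Attach to a Unity Editor that is already playing the scene
# (file_name=None is the "connect to a running Editor" mode).
env = UnityEnvironment(file_name=None)
env.reset()

# Assume a single registered behavior; grab its name and spec.
behavior_name = list(env.behavior_specs)[0]
spec = env.behavior_specs[behavior_name]

for _ in range(300):  # step bound is illustrative
    print("Step")
    steps = env.get_steps(behavior_name)   # (DecisionSteps, TerminalSteps)
    print(steps)                           # the "(, )" lines: object reprs stripped in extraction
    decision_steps, terminal_steps = steps
    for agent_id in decision_steps.agent_id:
        step = decision_steps[agent_id]
        print(step)                        # DecisionStep(obs=..., reward=..., agent_id=..., ...)
        print(step.reward)                 # the bare "0.0" lines
    # Feed empty (all-zero) actions so the simulation keeps advancing.
    env.set_actions(behavior_name, spec.action_spec.empty_action(len(decision_steps)))
    env.step()

env.close()

Read this way, the log is unsurprising: the reward stays 0.0 on every step (consistent with the NPC.CumulativeReward gauge staying at 0), and the three single-element action_mask arrays are all False, which in ML-Agents means no discrete action branch is masked off.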
The last captured step before the log cuts off:
+ "Step\n",
+ "(, )\n",
+ "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n",
+ "        2. , 1. , 2. , 1. , 0. , 0. ,\n",
+ "        0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.273026], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "0.0\n",
+ "Step\n",
+ "(, )\n",
+ "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n",
+ "        1.     ,  1.     ,  2.     ,  1.     ,  0.     ,  0.     ,\n",
+ "        0.     , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "0.0\n",
[... roughly a hundred further "Step" / DecisionStep printouts elided: each repeats the block above verbatim except for the 7th-9th entries of the vector observation (small integers cycling through 0, 1, and 2) and the final entry, a timer-like float hovering between 91.2745 and 91.30669; reward, action_mask, group_id, and group_reward stay 0 throughout. The cell's output resumes mid-printout below. ...]
+ "Step\n",
+ "(, )\n",
+ "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n",
+ "        2.     ,  0.     ,  2.     ,  1.     ,  0.     ,  0.     ,\n",
+ "        0.
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "0.0\n",
+ "Step\n",
+ "(, )\n",
+ "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 1., 1., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.],\n",
+ "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n",
+ "        0.     ,  1.     ,  2.     ,  1.     ,  0.     ,  0.     ,\n",
+ "        0.     , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "0.0\n",
[... all but the first and last of the "Step" printouts in this output cell (over a hundred in this hunk alone) are elided: each repeats the same 10x5 observation matrix, whose first three rows merely toggle between [0., 1., 1., 0., 0.] and [0., 1., 0., 0., 0.], and the same 14-element vector, where three small counters cycle over 0-2 and the last entry creeps from 91.26514 to 91.30669; reward, agent_id, action_mask, group_id, and group_reward never change ...]
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n" + ] + }, + { + "ename": "UnityCommunicatorStoppedException", + "evalue": "Communicator has exited.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mUnityCommunicatorStoppedException\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [85]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m----> 2\u001b[0m \u001b[43menv\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 4\u001b[0m asd \u001b[38;5;241m=\u001b[39m env\u001b[38;5;241m.\u001b[39mget_steps(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mnpc?team=0\u001b[39m\u001b[38;5;124m'\u001b[39m)\n", + "File \u001b[0;32m~/opt/miniforge3/lib/python3.9/site-packages/mlagents_envs/timers.py:305\u001b[0m, in \u001b[0;36mtimed..wrapped\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 303\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapped\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 304\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m hierarchical_timer(func\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m):\n\u001b[0;32m--> 305\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/miniforge3/lib/python3.9/site-packages/mlagents_envs/environment.py:350\u001b[0m, in \u001b[0;36mUnityEnvironment.step\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 348\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_communicator\u001b[38;5;241m.\u001b[39mexchange(step_input, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_poll_process)\n\u001b[1;32m 349\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m outputs \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 350\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m UnityCommunicatorStoppedException(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCommunicator has exited.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 351\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_update_behavior_specs(outputs)\n\u001b[1;32m 352\u001b[0m rl_output \u001b[38;5;241m=\u001b[39m outputs\u001b[38;5;241m.\u001b[39mrl_output\n", + "\u001b[0;31mUnityCommunicatorStoppedException\u001b[0m: Communicator has exited." 
+ ] + } + ], + "source": [ + "while True:\n", + " env.step()\n", + " print(\"Step\")\n", + " asd = env.get_steps('npc?team=0')\n", + " print(asd)\n", + " print(asd[0][0])\n", + " _id = asd[0][0].obs[0][0][0]\n", + " print(_id)\n", + " env.set_action_for_agent('npc?team=0', 0, mlagents_envs.environment.ActionTuple(discrete=np.array([[1, 0, random.randint(0,2)]])))" + ] + }, + { + "cell_type": "code", + "execution_count": 86, + "id": "db100c84-22ab-491b-b68d-4d5c1bbc66a3", + "metadata": {}, + "outputs": [], + "source": [ + "env.close()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}