diff --git a/.gitignore b/.gitignore
index c2e55b3..b0e4fab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,11 @@
 #
 # Get latest from https://github.com/github/gitignore/blob/main/Unity.gitignore
 #
+
+# MLagents
+Assets/ML-Agents/Timers/*
+#
+
 /[Ll]ibrary/
 /[Tt]emp/
 /[Oo]bj/
diff --git a/1 b/1
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/1
@@ -0,0 +1 @@
+
diff --git a/Assets/ML-Agents/Timers/DemoScene01_timers.json b/Assets/ML-Agents/Timers/DemoScene01_timers.json
deleted file mode 100755
index 3914503..0000000
--- a/Assets/ML-Agents/Timers/DemoScene01_timers.json
+++ /dev/null
@@ -1 +0,0 @@
-{"count":1,"self":249.99626239999998,"total":250.70272609999998,"children":{"InitializeActuators":{"count":1,"self":0.0005131,"total":0.0005131,"children":null},"AgentSendState":{"count":8389,"self":0.1187579,"total":0.1187579,"children":null},"DecideAction":{"count":8389,"self":0.5871921,"total":0.5871921,"children":null}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649259643","unity_version":"2019.4.36f1","command_line_arguments":"C:\\Program Files\\Unity\\Hub\\Editor\\2019.4.36f1\\Editor\\Unity.exe -projectpath D:\\real_shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-user -hubSessionId 91406950-b5ad-11ec-a63c-e7b76cbae13d -accessToken EBt97pMhHqClFDnjD_uh-3vplxP-uI2yS0WK-hSxfuM012f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"DemoScene01","end_time_seconds":"1649259893"}}
\ No newline at end of file
diff --git a/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta b/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta
deleted file mode 100755
index fe18e4a..0000000
--- a/Assets/ML-Agents/Timers/DemoScene01_timers.json.meta
+++ /dev/null
@@ -1,7 +0,0 @@
-fileFormatVersion: 2
-guid: 1b328c4e26e7a994e8a42f269ca7419b
-TextScriptImporter:
-  externalObjects: {}
-  userData: 
-  assetBundleName: 
-  assetBundleVariant: 
diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
deleted file mode 100755
index 87f7b00..0000000
--- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json
+++ /dev/null
@@ -1,5 +0,0 @@
-<<<<<<< HEAD
-{"count":1,"self":53.1106048,"total":53.237429,"children":{"InitializeActuators":{"count":2,"self":0.0019932,"total":0.0019932,"children":null},"InitializeSensors":{"count":2,"self":0.0019554999999999998,"total":0.0059847,"children":{"CreateObservableSensors":{"count":1,"self":0.0040292,"total":0.0040292,"children":null}}},"AgentSendState":{"count":1955,"self":0.0487552,"total":0.0846104,"children":{"CollectObservations":{"count":1955,"self":0.0069507,"total":0.0069507,"children":null},"WriteActionMask":{"count":1955,"self":0.0133727,"total":0.0133727,"children":null},"RequestDecision":{"count":1955,"self":0.0155318,"total":0.0155318,"children":null}}},"DecideAction":{"count":1955,"self":0.0238471,"total":0.0238471,"children":null},"AgentAct":{"count":1955,"self":0.0083594999999999989,"total":0.0083594999999999989,"children":null}},"gauges":{"Defender Behaviour.CumulativeReward":{"count":1,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0},"NPC.CumulativeReward":{"count":19,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650269333","unity_version":"2019.4.36f1","command_line_arguments":"C:\\Program Files\\Unity\\Hub\\Editor\\2019.4.36f1\\Editor\\Unity.exe -projectpath D:\\real-shooter-git -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-0gxjnU2tE3R1JAgVcOu84 -hubSessionId 617c85c0-beed-11ec-9737-adc452e9defe -accessToken jGHX5jX1yB9hRz2Om-mndr4yVItZwtReMKF_pNdib-I012f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1650269386"}}
-=======
-{"count":1,"self":19.9775968,"total":20.018629999999998,"children":{"InitializeActuators":{"count":1,"self":0.0010006,"total":0.0010006,"children":null},"AgentSendState":{"count":765,"self":0.0057472999999999995,"total":0.0057472999999999995,"children":null},"DecideAction":{"count":765,"self":0.0332866,"total":0.0332866,"children":null}},"gauges":{},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1649742457","unity_version":"2019.4.35f1","command_line_arguments":"D:\\Unity\\2019.4.35f1\\Editor\\Unity.exe -projectpath C:\\Users\\kiril\\real-shooter -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-kiril -hubSessionId 21e9de90-ba14-11ec-9398-079edccf5e34 -accessToken oIpf_7gKWQQRilVTeJehUsFhrxasdzsG_K3j5Swtgx0009f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"Greatest_map_ever","end_time_seconds":"1649742477"}}
->>>>>>> lisin/character/1
diff --git a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta b/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta
deleted file mode 100755
index 1400775..0000000
--- a/Assets/ML-Agents/Timers/Greatest_map_ever_timers.json.meta
+++ /dev/null
@@ -1,7 +0,0 @@
-fileFormatVersion: 2
-guid: 80ef0d75029e25243857877facd14d75
-TextScriptImporter:
-  externalObjects: {}
-  userData: 
-  assetBundleName: 
-  assetBundleVariant: 
diff --git a/Assets/ML-Agents/Timers/dont touch me plz_timers.json b/Assets/ML-Agents/Timers/dont touch me plz_timers.json
deleted file mode 100755
index ef1ab1e..0000000
--- a/Assets/ML-Agents/Timers/dont touch me plz_timers.json
+++ /dev/null
@@ -1 +0,0 @@
-{"count":1,"self":2463.6164095999998,"total":2463.7432165,"children":{"InitializeActuators":{"count":2,"self":0.0019932,"total":0.0019932,"children":null},"InitializeSensors":{"count":2,"self":0.0019554999999999998,"total":0.0059847,"children":{"CreateObservableSensors":{"count":1,"self":0.0040292,"total":0.0040292,"children":null}}},"AgentSendState":{"count":1955,"self":0.0487552,"total":0.0846104,"children":{"CollectObservations":{"count":1955,"self":0.0069507,"total":0.0069507,"children":null},"WriteActionMask":{"count":1955,"self":0.0133727,"total":0.0133727,"children":null},"RequestDecision":{"count":1955,"self":0.0155318,"total":0.0155318,"children":null}}},"DecideAction":{"count":1955,"self":0.0238471,"total":0.0238471,"children":null},"AgentAct":{"count":1955,"self":0.0083594999999999989,"total":0.0083594999999999989,"children":null},"CreateObservableSensors":{"count":18,"self":0.0019966,"total":0.0019966,"children":null}},"gauges":{"Defender Behaviour.CumulativeReward":{"count":1,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0},"NPC.CumulativeReward":{"count":19,"max":0,"min":0,"runningAverage":0,"value":0,"weightedAverage":0}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1650269333","unity_version":"2019.4.36f1","command_line_arguments":"C:\\Program Files\\Unity\\Hub\\Editor\\2019.4.36f1\\Editor\\Unity.exe -projectpath D:\\real-shooter-git -useHub -hubIPC -cloudEnvironment production -licensingIpc LicenseClient-0gxjnU2tE3R1JAgVcOu84 -hubSessionId 617c85c0-beed-11ec-9737-adc452e9defe 
-accessToken jGHX5jX1yB9hRz2Om-mndr4yVItZwtReMKF_pNdib-I012f","communication_protocol_version":"1.5.0","com.unity.ml-agents_version":"2.0.1","scene_name":"dont touch me plz","end_time_seconds":"1650271796"}} \ No newline at end of file diff --git a/Assets/Prefabs/Bot.prefab b/Assets/Prefabs/Bot.prefab old mode 100755 new mode 100644 index af42c97..add7a37 --- a/Assets/Prefabs/Bot.prefab +++ b/Assets/Prefabs/Bot.prefab @@ -1,6 +1,6 @@ %YAML 1.1 %TAG !u! tag:unity3d.com,2011: ---- !u!1 &2988578997639256874 +--- !u!1 &6171680323407988095 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -8,47 +8,86 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 2988578997639256870} - - component: {fileID: 2988578997639256869} - - component: {fileID: 2988578997639256868} - - component: {fileID: 2988578997639256875} - - component: {fileID: 5447337162552783061} - m_Layer: 0 - m_Name: Bot + - component: {fileID: 6171680323407988094} + m_Layer: 8 + m_Name: FeetTransform m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &2988578997639256870 +--- !u!4 &6171680323407988094 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323407988095} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 28.310326, y: 13.98, z: 46.45} + m_LocalPosition: {x: 0, y: -1, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] - m_Father: {fileID: 0} + m_Father: {fileID: 6171680323948707550} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!33 &2988578997639256869 +--- !u!1 &6171680323948707524 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 6171680323948707550} + - component: {fileID: 6171680323948707551} + - component: {fileID: 5770895893828047079} + - component: {fileID: 6171680323948707521} + - component: {fileID: 6171680323948707549} + - component: {fileID: 6171680323948707520} + - component: {fileID: 8774702625908438859} + - component: {fileID: 6521536090983603910} + - component: {fileID: 6133354754598649724} + - component: {fileID: 2756943273076691504} + - component: {fileID: 6638271233700792696} + - component: {fileID: 5988018701276467001} + - component: {fileID: 1547882613481475944} + m_Layer: 8 + m_Name: Bot + m_TagString: Defender + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &6171680323948707550 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} + m_LocalPosition: {x: 50.67923, y: 1.16, z: -22.57} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 6171680323407988094} + m_Father: {fileID: 0} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!33 &6171680323948707551 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 &2988578997639256868 +--- 
!u!23 &5770895893828047079 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -81,33 +120,170 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!136 &2988578997639256875 +--- !u!136 &6171680323948707521 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 + m_Enabled: 0 + m_Radius: 0.3 m_Height: 2 m_Direction: 1 m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &5447337162552783061 +--- !u!136 &6171680323948707549 +CapsuleCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + m_Radius: 0.3 + m_Height: 1.3 + m_Direction: 1 + m_Center: {x: 0, y: -0.35, z: 0} +--- !u!136 &6171680323948707520 +CapsuleCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + m_Radius: 0.3 + m_Height: 0.8 + m_Direction: 1 + m_Center: {x: 0, y: -0.6, z: 0} +--- !u!195 &8774702625908438859 +NavMeshAgent: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_AgentTypeID: 0 + m_Radius: 0.5000001 + m_Speed: 3.5 + m_Acceleration: 8 + avoidancePriority: 50 + m_AngularSpeed: 120 + m_StoppingDistance: 0 + m_AutoTraverseOffMeshLink: 1 + m_AutoBraking: 1 + m_AutoRepath: 1 + m_Height: 2 + m_BaseOffset: 1 + m_WalkableMask: 4294967295 + m_ObstacleAvoidanceType: 4 +--- !u!114 &6521536090983603910 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2988578997639256874} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} + m_Name: + m_EditorClassIdentifier: + navMeshAgent: {fileID: 8774702625908438859} + flag: {fileID: 6818223691859422291, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} +--- !u!114 &6133354754598649724 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: dd8012d5925524537b27131fef517017, type: 3} + m_Name: + m_EditorClassIdentifier: + m_SensorName: BufferSensor + m_ObservableSize: 5 + m_MaxNumObservables: 10 +--- !u!114 &2756943273076691504 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + 
VectorObservationSize: 14 + NumStackedVectorObservations: 1 + m_ActionSpec: + m_NumContinuousActions: 0 + BranchSizes: 010000000100000001000000 + VectorActionSize: 010000000100000001000000 + VectorActionDescriptions: [] + VectorActionSpaceType: 0 + hasUpgradedBrainParametersWithActionSpec: 1 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_BehaviorType: 0 + m_BehaviorName: npc + TeamId: 0 + m_UseChildSensors: 1 + m_UseChildActuators: 1 + m_ObservableAttributeHandling: 0 +--- !u!114 &6638271233700792696 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} m_Name: m_EditorClassIdentifier: - HealthPoints: 100 - Armour: 100 - Ammunition: 360 - LastTimeHit: 0 + agentParameters: + maxStep: 0 + hasUpgradedFromAgentParameters: 1 + MaxStep: 100 +--- !u!114 &5988018701276467001 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} + m_Name: + m_EditorClassIdentifier: + navMeshAgent: {fileID: 8774702625908438859} + flag: {fileID: 6818223691859422291, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} +--- !u!114 &1547882613481475944 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 6171680323948707524} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 3a5c9d521e5ef4759a8246a07d52221e, type: 3} + m_Name: + m_EditorClassIdentifier: + DecisionPeriod: 5 + TakeActionsBetweenDecisions: 0 diff --git a/Assets/Prefabs/DragonFucker.prefab b/Assets/Prefabs/DragonFucker.prefab deleted file mode 100644 index bc71ab1..0000000 --- a/Assets/Prefabs/DragonFucker.prefab +++ /dev/null @@ -1,2744 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!1 &3344194988163342465 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054817} - m_Layer: 0 - m_Name: Index_Distal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054817 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342465} - m_LocalRotation: {x: 0.012835801, y: 0.013473716, z: 0.08711711, w: 0.99602425} - m_LocalPosition: {x: -7.9785438, y: 1.7053026e-13, z: 4.440892e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054821} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342467 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054819} - m_Layer: 0 - m_Name: Index_Distal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054819 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342467} - m_LocalRotation: {x: -0.013842603, y: -0.01243883, z: 0.16401216, w: 0.98628277} - m_LocalPosition: {x: -7.9783425, y: 5.684342e-14, z: -1.7763568e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054823} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342469 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054821} - m_Layer: 0 - m_Name: Index_Intermediate_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054821 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342469} - m_LocalRotation: {x: -0.017963478, y: -0.041126773, z: 0.25885317, w: 0.96487355} - m_LocalPosition: {x: -10.023805, y: -1.4210855e-13, z: -1.7763568e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054817} - m_Father: {fileID: 3344194988163054841} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342471 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054823} - m_Layer: 0 - m_Name: Index_Intermediate_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054823 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342471} - m_LocalRotation: {x: 0.017964862, y: 0.041127786, z: 0.25889415, w: 0.96486247} - 
m_LocalPosition: {x: -10.023557, y: -1.4210855e-13, z: 8.881784e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054819} - m_Father: {fileID: 3344194988163054843} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342473 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054825} - m_Layer: 0 - m_Name: Headgear_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054825 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342473} - m_LocalRotation: {x: 0.47540557, y: -0.44884732, z: 0.5234402, w: 0.54638463} - m_LocalPosition: {x: -8.526513e-14, y: -6.550316e-15, z: -13.487081} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054805} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342475 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054827} - - component: {fileID: 3344194988174912995} - m_Layer: 0 - m_Name: head1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054827 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342475} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -176.9416, z: -6.1230974} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174912995 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342475} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300000, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 
3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -71.52329, y: 10.616158, z: -1.0935726} - m_Extent: {x: 26.513672, y: 26.529358, z: 17.550735} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342477 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054829} - m_Layer: 0 - m_Name: Hips - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054829 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342477} - m_LocalRotation: {x: -0.4582423, y: 0.5385295, z: -0.4582423, w: 0.5385295} - m_LocalPosition: {x: -0, y: 1.0650933, z: 0.038611155} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: - - {fileID: 3344194988163054787} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054773} - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342479 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054831} - m_Layer: 0 - m_Name: Headgear_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054831 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - 
m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342479} - m_LocalRotation: {x: -0.5234401, y: 0.5463846, z: 0.4754055, w: 0.44884726} - m_LocalPosition: {x: 1.3088212, y: 3.871166, z: 12.853171} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054805} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342481 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054833} - m_Layer: 0 - m_Name: LowerLeg_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054833 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342481} - m_LocalRotation: {x: -0.6640286, y: -0.051458772, z: -0.042404156, w: 0.74472815} - m_LocalPosition: {x: -33.436848, y: -2.4868996e-14, z: 6.217249e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054813} - m_Father: {fileID: 3344194988163054775} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342483 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054835} - m_Layer: 0 - m_Name: LowerArm_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054835 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342483} - m_LocalRotation: {x: 4.9720758e-29, y: -0.049585804, z: 1.0024346e-27, w: 0.9987699} - m_LocalPosition: {x: -33.666927, y: 0, z: -1.4432899e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054807} - m_Father: {fileID: 3344194988163054769} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342485 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054837} - m_Layer: 0 - m_Name: magazine_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054837 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342485} - m_LocalRotation: {x: -0.4881349, y: 0.5173824, z: 0.48743477, w: 0.5064061} - m_LocalPosition: {x: -0.004472253, y: -40.147858, z: 0.03500123} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054785} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342487 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054839} - m_Layer: 0 - m_Name: LowerLeg_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - 
m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054839 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342487} - m_LocalRotation: {x: 0.66088396, y: 0.05163587, z: -0.04218814, w: 0.74752015} - m_LocalPosition: {x: -33.437313, y: 4.973799e-14, z: -8.881784e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054803} - m_Father: {fileID: 3344194988163054773} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342489 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054841} - m_Layer: 0 - m_Name: Index_Proximal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054841 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342489} - m_LocalRotation: {x: -0.0028644414, y: -0.042128764, z: 0.08165133, w: 0.99576604} - m_LocalPosition: {x: -13.357139, y: -0.09103328, z: -13.440055} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054821} - m_Father: {fileID: 3344194988163054807} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342491 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054843} - m_Layer: 0 - m_Name: Index_Proximal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054843 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342491} - m_LocalRotation: {x: 0.0028642316, y: 0.04212831, z: 0.08163239, w: 0.9957676} - m_LocalPosition: {x: -13.357445, y: -0.09144714, z: 13.440094} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054823} - m_Father: {fileID: 3344194988163054801} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342493 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054845} - m_Layer: 0 - m_Name: LowerArm_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054845 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342493} - m_LocalRotation: {x: -1.11818e-29, y: 0.049585894, z: 1.0904029e-28, w: 0.9987699} - m_LocalPosition: {x: -33.66694, y: 2.842171e-14, z: 4.2188475e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054801} - m_Father: {fileID: 3344194988163054771} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342495 -GameObject: - m_ObjectHideFlags: 0 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054847} - - component: {fileID: 3344194988174912993} - m_Layer: 0 - m_Name: Leg1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054847 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342495} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -96.69518, z: -8.197696} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 6 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174912993 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342495} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300008, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 
3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: 53.24073, y: -15.325765, z: 0.17397118} - m_Extent: {x: 69.29029, y: 36.12146, z: 51.77101} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342497 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054721} - m_Layer: 0 - m_Name: RestOfFingers_Intermediate_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054721 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342497} - m_LocalRotation: {x: -0.0061721927, y: -0.012479491, z: 0.3441051, w: 0.93882793} - m_LocalPosition: {x: -8.025833, y: -1.7053026e-13, z: 4.8849813e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054733} - m_Father: {fileID: 3344194988163054725} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342499 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054723} - m_Layer: 0 - m_Name: RestOfFingers_Intermediate_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054723 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342499} - m_LocalRotation: {x: 0.0061717043, y: 0.012478555, z: 0.3440602, w: 0.9388444} - m_LocalPosition: {x: -8.02586, y: -1.1368684e-13, z: -5.3290705e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054735} - m_Father: {fileID: 3344194988163054727} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342501 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054725} - m_Layer: 0 - m_Name: RestOfFingers_Proximal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054725 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342501} - m_LocalRotation: {x: -0.0028322286, y: -0.060177784, z: 0.060808677, w: 0.9963297} - m_LocalPosition: {x: -18.094389, y: 8.526513e-14, z: -1.0658141e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054721} - m_Father: {fileID: 3344194988163054807} - m_RootOrder: 1 - 
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342503 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054727} - m_Layer: 0 - m_Name: RestOfFingers_Proximal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054727 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342503} - m_LocalRotation: {x: 0.0028340812, y: 0.060178064, z: 0.060824323, w: 0.9963288} - m_LocalPosition: {x: -18.094444, y: 5.684342e-14, z: 1.7763568e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054723} - m_Father: {fileID: 3344194988163054801} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342505 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054729} - - component: {fileID: 3344194989366231820} - - component: {fileID: 663001753118162388} - - component: {fileID: 663001753118162387} - - component: {fileID: 663001753118162386} - m_Layer: 0 - m_Name: DragonFucker - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054729 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342505} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 13.8, y: 1.22, z: -11.53} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054795} - - {fileID: 3344194988163054791} - - {fileID: 3344194988163054811} - - {fileID: 3344194988163054809} - - {fileID: 3344194988163054827} - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054847} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!95 &3344194989366231820 -Animator: - serializedVersion: 3 - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342505} - m_Enabled: 1 - m_Avatar: {fileID: 9000000, guid: 860793eb3324391468f1c120a75ec049, type: 3} - m_Controller: {fileID: 9100000, guid: 95a44e56d04c7d248ba723eda9611c51, type: 2} - m_CullingMode: 0 - m_UpdateMode: 0 - m_ApplyRootMotion: 0 - m_LinearVelocityBlending: 0 - m_WarningMessage: - m_HasTransformHierarchy: 1 - m_AllowConstantClipSamplingOptimization: 1 - m_KeepAnimatorControllerStateOnDisable: 0 ---- !u!64 &663001753118162388 -MeshCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342505} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 4 - m_Convex: 0 - m_CookingOptions: 30 - m_Mesh: {fileID: 0} ---- !u!136 &663001753118162387 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342505} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - 
m_Radius: 0.5 - m_Height: 1 - m_Direction: 1 - m_Center: {x: 0, y: 1, z: 0} ---- !u!114 &663001753118162386 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342505} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} - m_Name: - m_EditorClassIdentifier: - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 0} - feetTransform: {fileID: 0} - playerSettings: - ViewXSensetivity: 0 - ViewYSensetivity: 0 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 0 - RunningStrafeSpeed: 0 - WalkingForwardSpeed: 0 - WalkingBackwardSpeed: 0 - WalkingStrafeSpeed: 0 - JumpingHeight: 0 - JumpingFalloff: 0 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 0 - gravityAmount: 0 - gravityMin: 0 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0 - playerStandStance: - CameraHeight: 0 - StanceCollider: {fileID: 0} - playerCrouchStance: - CameraHeight: 0 - StanceCollider: {fileID: 0} - playerProneStance: - CameraHeight: 0 - StanceCollider: {fileID: 0} - currentWeapon: {fileID: 0} ---- !u!1 &3344194988163342507 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054731} - m_Layer: 0 - m_Name: Neck - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054731 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342507} - m_LocalRotation: {x: 0.9631089, y: -0.26911193, z: 1.6478353e-17, w: -5.897341e-17} - m_LocalPosition: {x: -17.7523, y: -2.842171e-14, z: 0.000000026610966} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054805} - m_Father: {fileID: 3344194988163054815} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342509 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054733} - m_Layer: 0 - m_Name: RestOfFingers_Distal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054733 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342509} - m_LocalRotation: {x: 0.0041774614, y: 0.0038508072, z: 0.015434083, w: 0.99986476} - m_LocalPosition: {x: -8.174185, y: 2.842171e-14, z: 2.6645353e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054721} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342511 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054735} - m_Layer: 0 
- m_Name: RestOfFingers_Distal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054735 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342511} - m_LocalRotation: {x: -0.004532425, y: -0.003425033, z: 0.11285209, w: 0.9935956} - m_LocalPosition: {x: -8.173605, y: -8.526513e-14, z: -3.5527137e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054723} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342513 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054737} - m_Layer: 0 - m_Name: ShoulderPadBody_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054737 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342513} - m_LocalRotation: {x: -0.0066352007, y: 0.011333432, z: 0.50519216, w: 0.86290693} - m_LocalPosition: {x: -16.02375, y: -6.3948846e-14, z: 1.2878587e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054741} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342515 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054739} - m_Layer: 0 - m_Name: ShoulderPadBody_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054739 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342515} - m_LocalRotation: {x: 0.0066350633, y: -0.011333145, z: 0.5051939, w: 0.862906} - m_LocalPosition: {x: -16.023825, y: -1.4210855e-14, z: 2.6645353e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054743} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342517 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054741} - m_Layer: 0 - m_Name: ShoulderPadCTRL_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054741 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342517} - m_LocalRotation: {x: -0.039328646, y: 0.10945006, z: -0.6506794, w: 0.7503934} - m_LocalPosition: {x: -9.938715, y: 14.657999, z: 1.039447} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054737} - m_Father: {fileID: 3344194988163054745} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342519 
-GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054743} - m_Layer: 0 - m_Name: ShoulderPadCTRL_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054743 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342519} - m_LocalRotation: {x: 0.039328095, y: -0.109450735, z: -0.65068716, w: 0.75038666} - m_LocalPosition: {x: -9.938806, y: 14.657373, z: -1.0394562} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054739} - m_Father: {fileID: 3344194988163054747} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342521 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054745} - m_Layer: 0 - m_Name: Shoulder_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054745 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342521} - m_LocalRotation: {x: -0.29884863, y: 0.73412436, z: -0.41826612, w: -0.4436265} - m_LocalPosition: {x: -12.348376, y: -6.29761, z: -18.7061} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054769} - m_Father: {fileID: 3344194988163054815} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342523 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054747} - m_Layer: 0 - m_Name: Shoulder_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054747 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342523} - m_LocalRotation: {x: -0.2988425, y: 0.7341269, z: 0.41826975, w: 0.4436229} - m_LocalPosition: {x: -12.348807, y: -6.29741, z: 18.706108} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054771} - m_Father: {fileID: 3344194988163054815} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342525 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054749} - m_Layer: 0 - m_Name: ShoulderPadBlade_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054749 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342525} - m_LocalRotation: 
{x: -0.0066352007, y: 0.011333432, z: 0.50519216, w: 0.86290693} - m_LocalPosition: {x: -13.296815, y: -22.177006, z: 0.5073527} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054741} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342527 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054751} - m_Layer: 0 - m_Name: ShoulderPadBlade_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054751 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342527} - m_LocalRotation: {x: 0.0066350633, y: -0.011333145, z: 0.5051939, w: 0.862906} - m_LocalPosition: {x: -13.296959, y: -22.176832, z: -0.5073402} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054743} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342561 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054785} - m_Layer: 0 - m_Name: ArmPosition_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054785 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342561} - m_LocalRotation: {x: 0.0025357183, y: -0.0073521743, z: -0.47313836, w: 0.88095385} - m_LocalPosition: {x: -2.423428, y: -55.614994, z: 120.53} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342563 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054787} - m_Layer: 0 - m_Name: ArmPosition_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054787 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342563} - m_LocalRotation: {x: 0.0073521743, y: 0.002535718, z: 0.88095385, w: 0.47313833} - m_LocalPosition: {x: -2.423428, y: -55.614994, z: -120.53} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342565 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054789} - m_Layer: 0 - m_Name: BackPack - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054789 -Transform: - 
m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342565} - m_LocalRotation: {x: -0.3617453, y: 0.5306367, z: 0.3968756, w: 0.6557856} - m_LocalPosition: {x: -10.219411, y: -38.523506, z: 0.00000010329652} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054797} - m_Father: {fileID: 3344194988163054815} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342567 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054791} - - component: {fileID: 3344194988174913001} - m_Layer: 0 - m_Name: AssaultRifle - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054791 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342567} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -118.131454, y: -98.01, z: -57.969975} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174913001 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342567} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300010, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - 
{fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -34.27955, y: -49.079704, z: 120.48372} - m_Extent: {x: 95.48148, y: 94.6697, z: 10.629513} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342569 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054793} - m_Layer: 0 - m_Name: ArmPlacement_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054793 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342569} - m_LocalRotation: {x: -0.002378591, y: 0.08787313, z: 0.02695381, w: 0.99576414} - m_LocalPosition: {x: -44.450283, y: 2.842171e-14, z: -2.1316282e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054789} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342571 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054795} - - component: {fileID: 3344194988174913003} - m_Layer: 0 - m_Name: Arm1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054795 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342571} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -163.22968, z: -0.3527179} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174913003 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342571} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - 
m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300006, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -58.594677, y: 13.632675, z: 0.00010681152} - m_Extent: {x: 29.436275, y: 28.906914, z: 140.37485} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342573 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054797} - m_Layer: 0 - m_Name: ArmPlacement_Upper - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054797 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - 
m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342573} - m_LocalRotation: {x: -0.002378591, y: 0.08787313, z: 0.02695381, w: 0.99576414} - m_LocalPosition: {x: -0.56803536, y: 46.34834, z: 10.906936} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054789} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342575 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054799} - m_Layer: 0 - m_Name: ArmPlacement_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054799 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342575} - m_LocalRotation: {x: -0.02695381, y: 0.99576414, z: -0.0023785909, w: -0.08787313} - m_LocalPosition: {x: 41.471436, y: 4.6548405, z: -15.306103} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054789} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342577 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054801} - m_Layer: 0 - m_Name: Hand_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054801 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342577} - m_LocalRotation: {x: 0.0005932963, y: -0.08528753, z: -0.006930911, w: 0.9963321} - m_LocalPosition: {x: -37.441498, y: 8.526513e-14, z: 2.7533531e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054753} - m_Father: {fileID: 3344194988163054845} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342579 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054803} - m_Layer: 0 - m_Name: Foot_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054803 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342579} - m_LocalRotation: {x: 0.81892335, y: 0.5656828, z: 0.024710972, w: -0.09357782} - m_LocalPosition: {x: -62.73464, y: -9.769963e-15, z: 2.4868996e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054779} - m_Father: {fileID: 3344194988163054839} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342581 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054805} - m_Layer: 0 - m_Name: Head - 
m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054805 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342581} - m_LocalRotation: {x: 0.071218304, y: -0.028954746, z: -0.059960153, w: 0.99523586} - m_LocalPosition: {x: -13.255574, y: -1.4210855e-14, z: 7.364747e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - m_Father: {fileID: 3344194988163054731} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342583 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054807} - m_Layer: 0 - m_Name: Hand_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054807 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342583} - m_LocalRotation: {x: -0.0005926622, y: 0.085287675, z: -0.0069382307, w: 0.99633205} - m_LocalPosition: {x: -37.441414, y: 1.7053026e-13, z: -5.3290705e-15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054759} - m_Father: {fileID: 3344194988163054835} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342585 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054809} - - component: {fileID: 3344194988174913005} - m_Layer: 0 - m_Name: Body1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054809 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342585} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0.0000018693923, y: -129.49721, z: -6.0212374} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174913005 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342585} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 
- m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300002, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -31.982391, y: 2.5145874, z: 0.0000019073486} - m_Extent: {x: 51.12609, y: 46.604404, z: 27.749142} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342587 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054811} - - component: {fileID: 3344194988174913007} - m_Layer: 0 - m_Name: Backpack1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054811 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342587} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -138.49533, z: 34.48629} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 3344194988163054729} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!137 &3344194988174913007 -SkinnedMeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} 
- m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342587} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 0 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 2100000, guid: 1666fb81395f51e4293b01cca8f58481, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 - serializedVersion: 2 - m_Quality: 0 - m_UpdateWhenOffscreen: 0 - m_SkinnedMotionVectors: 1 - m_Mesh: {fileID: 4300004, guid: 5ae220fa0f79ffe41b0102a618febf5a, type: 3} - m_Bones: - - {fileID: 3344194988163054829} - - {fileID: 3344194988163054763} - - {fileID: 3344194988163054815} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054805} - - {fileID: 3344194988163054825} - - {fileID: 3344194988163054831} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054771} - - {fileID: 3344194988163054845} - - {fileID: 3344194988163054801} - - {fileID: 3344194988163054727} - - {fileID: 3344194988163054723} - - {fileID: 3344194988163054735} - - {fileID: 3344194988163054843} - - {fileID: 3344194988163054823} - - {fileID: 3344194988163054819} - - {fileID: 3344194988163054753} - - {fileID: 3344194988163054765} - - {fileID: 3344194988163054761} - - {fileID: 3344194988163054743} - - {fileID: 3344194988163054739} - - {fileID: 3344194988163054751} - - {fileID: 3344194988163054745} - - {fileID: 3344194988163054769} - - {fileID: 3344194988163054835} - - {fileID: 3344194988163054807} - - {fileID: 3344194988163054725} - - {fileID: 3344194988163054721} - - {fileID: 3344194988163054733} - - {fileID: 3344194988163054841} - - {fileID: 3344194988163054821} - - {fileID: 3344194988163054817} - - {fileID: 3344194988163054759} - - {fileID: 3344194988163054755} - - {fileID: 3344194988163054767} - - {fileID: 3344194988163054741} - - {fileID: 3344194988163054737} - - {fileID: 3344194988163054749} - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054793} - - {fileID: 3344194988163054797} - - {fileID: 3344194988163054799} - - {fileID: 3344194988163054775} - - {fileID: 3344194988163054833} - - {fileID: 3344194988163054813} - - {fileID: 3344194988163054757} - - {fileID: 3344194988163054777} - - {fileID: 3344194988163054773} - - {fileID: 3344194988163054839} - - {fileID: 3344194988163054803} - - {fileID: 3344194988163054779} - - {fileID: 3344194988163054783} - - {fileID: 3344194988163054785} - - {fileID: 3344194988163054837} - - {fileID: 3344194988163054781} - - {fileID: 3344194988163054787} - m_BlendShapeWeights: [] - m_RootBone: {fileID: 3344194988163054829} - m_AABB: - m_Center: {x: -28.89946, y: 44.769905, z: -1.5718498} - m_Extent: {x: 47.82257, y: 32.29384, z: 42.822285} - m_DirtyAABB: 0 ---- !u!1 &3344194988163342589 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054813} - m_Layer: 0 - m_Name: Foot_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - 
m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054813 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342589} - m_LocalRotation: {x: 0.8193084, y: 0.56578285, z: -0.02233458, w: 0.09013736} - m_LocalPosition: {x: -62.734695, y: 2.842171e-14, z: -1.7763568e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054757} - m_Father: {fileID: 3344194988163054833} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342591 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054815} - m_Layer: 0 - m_Name: Chest - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054815 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342591} - m_LocalRotation: {x: 3.1378165e-33, y: -5.551116e-17, z: 0.18862787, w: 0.9820487} - m_LocalPosition: {x: -24.042006, y: -1.0658141e-14, z: 1.9451509e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054789} - - {fileID: 3344194988163054731} - - {fileID: 3344194988163054747} - - {fileID: 3344194988163054745} - m_Father: {fileID: 3344194988163054763} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342657 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054753} - m_Layer: 0 - m_Name: Thumb_Proximal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054753 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342657} - m_LocalRotation: {x: 0.057522308, y: 0.49499637, z: 0.112048574, w: 0.8597179} - m_LocalPosition: {x: -0.2922163, y: -0.5636321, z: 12.295864} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054765} - m_Father: {fileID: 3344194988163054801} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342659 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054755} - m_Layer: 0 - m_Name: Thumb_Intermediate_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054755 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342659} - m_LocalRotation: {x: 0.048999686, y: 0.1354733, z: 0.1004816, w: 0.9844539} - m_LocalPosition: {x: -9.836, y: -5.684342e-14, z: -5.684342e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054767} - m_Father: {fileID: 3344194988163054759} - m_RootOrder: 0 - m_LocalEulerAnglesHint: 
{x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342661 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054757} - m_Layer: 0 - m_Name: Toe_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054757 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342661} - m_LocalRotation: {x: 0.0013659683, y: 0.0051824837, z: -0.24785845, w: 0.9687815} - m_LocalPosition: {x: -13.771131, y: 1.9539925e-14, z: -1.0658141e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054777} - m_Father: {fileID: 3344194988163054813} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342663 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054759} - m_Layer: 0 - m_Name: Thumb_Proximal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054759 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342663} - m_LocalRotation: {x: -0.057529792, y: -0.49499094, z: 0.11207554, w: 0.859717} - m_LocalPosition: {x: -0.29218963, y: -0.56312394, z: -12.29586} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054755} - m_Father: {fileID: 3344194988163054807} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342665 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054761} - m_Layer: 0 - m_Name: Thumb_Distal_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054761 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342665} - m_LocalRotation: {x: -0.017883137, y: -0.03974522, z: -0.025908269, w: 0.99871385} - m_LocalPosition: {x: -8.816608, y: 0, z: 1.4210855e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054765} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342667 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054763} - m_Layer: 0 - m_Name: Spine - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054763 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342667} - m_LocalRotation: {x: 0.99452215, y: -0.104526356, z: -7.7878193e-10, w: 0.0000000074097675} - 
m_LocalPosition: {x: -21.090727, y: -8.881784e-15, z: -4.4013775e-16} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054815} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342669 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054765} - m_Layer: 0 - m_Name: Thumb_Intermediate_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054765 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342669} - m_LocalRotation: {x: -0.04900198, y: -0.13549508, z: 0.10049181, w: 0.98444974} - m_LocalPosition: {x: -9.835772, y: -1.1368684e-13, z: -2.842171e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054761} - m_Father: {fileID: 3344194988163054753} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342671 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054767} - m_Layer: 0 - m_Name: Thumb_Distal_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054767 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342671} - m_LocalRotation: {x: 0.0017341404, y: 0.008320127, z: -0.005799853, w: 0.9999471} - m_LocalPosition: {x: -8.816269, y: 0, z: -1.4210855e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054755} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342673 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054769} - m_Layer: 0 - m_Name: UpperArm_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054769 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342673} - m_LocalRotation: {x: 0.029697837, y: 0.15896995, z: -0.18122736, w: 0.97005326} - m_LocalPosition: {x: -15.111769, y: 0, z: -3.1974423e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054835} - m_Father: {fileID: 3344194988163054745} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342675 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054771} - m_Layer: 0 - m_Name: UpperArm_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054771 -Transform: - m_ObjectHideFlags: 0 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342675} - m_LocalRotation: {x: -0.029700447, y: -0.15896967, z: -0.18123563, w: 0.97005165} - m_LocalPosition: {x: -15.111847, y: 0, z: -3.7303494e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054845} - m_Father: {fileID: 3344194988163054747} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342677 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054773} - m_Layer: 0 - m_Name: UpperLeg_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054773 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342677} - m_LocalRotation: {x: 0.03061261, y: 0.6954094, z: 0.7139454, w: 0.07583304} - m_LocalPosition: {x: 0.084786385, y: 0.40212917, z: 18.8642} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054839} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342679 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054775} - m_Layer: 0 - m_Name: UpperLeg_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054775 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342679} - m_LocalRotation: {x: -0.030611672, y: -0.6954005, z: 0.7139541, w: 0.07583354} - m_LocalPosition: {x: 0.08523814, y: 0.40205857, z: -18.864191} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054833} - m_Father: {fileID: 3344194988163054829} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342681 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054777} - m_Layer: 0 - m_Name: Toetip_Left - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054777 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342681} - m_LocalRotation: {x: 0.00016529544, y: -0.016936114, z: -0.0047631934, w: 0.99984527} - m_LocalPosition: {x: -17.883871, y: 3.5527137e-15, z: 2.1316282e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054757} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342683 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054779} - 
m_Layer: 0 - m_Name: Toe_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054779 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342683} - m_LocalRotation: {x: -0.0013655907, y: -0.005181019, z: -0.24785995, w: 0.9687811} - m_LocalPosition: {x: -13.771146, y: 2.1316282e-14, z: 1.0658141e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 3344194988163054783} - m_Father: {fileID: 3344194988163054803} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342685 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054781} - m_Layer: 0 - m_Name: Trigger_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054781 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342685} - m_LocalRotation: {x: -0.0031178175, y: 0.0071248533, z: 0.40088162, w: 0.91609687} - m_LocalPosition: {x: -9.913989, y: -10.731702, z: 9.284221} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054785} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &3344194988163342687 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 3344194988163054783} - m_Layer: 0 - m_Name: Toetip_Right - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &3344194988163054783 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 3344194988163342687} - m_LocalRotation: {x: -0.00028878966, y: 0.016934738, z: -0.012053749, w: 0.99978393} - m_LocalPosition: {x: -17.883856, y: -2.6645353e-15, z: -1.4210855e-14} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 3344194988163054779} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} diff --git a/Assets/Prefabs/Player.prefab b/Assets/Prefabs/Player.prefab index 7dd28e6..31319fc 100755 --- a/Assets/Prefabs/Player.prefab +++ b/Assets/Prefabs/Player.prefab @@ -1,6 +1,6 @@ %YAML 1.1 %TAG !u! 
tag:unity3d.com,2011: ---- !u!1 &4528203470433968325 +--- !u!1 &5245491127989480125 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -8,101 +8,18 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 4528203470433968376} - - component: {fileID: 4528203470433968327} - - component: {fileID: 4528203470433968326} - m_Layer: 8 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4528203470433968376 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470433968325} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.734, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4528203471293941515} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &4528203470433968327 -Camera: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470433968325} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!81 &4528203470433968326 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470433968325} - m_Enabled: 1 ---- !u!1 &4528203470625763699 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 4528203470625763689} - - component: {fileID: 4528203470625763688} - - component: {fileID: -4942972567661207728} - - component: {fileID: 4528203470625763702} - - component: {fileID: 4528203470625763690} - - component: {fileID: 4528203470625763703} - - component: {fileID: 4528203470625763701} - - component: {fileID: 4528203470625763700} - - component: {fileID: 1061105263471521090} - - component: {fileID: 1809549200} - - component: {fileID: 1809549201} - - component: {fileID: 1809549212} + - component: {fileID: 5245491127989480103} + - component: {fileID: 5245491127989480102} + - component: {fileID: 5583297852527723678} + - component: {fileID: 5245491127989480120} + - component: {fileID: 5245491127989480100} + - component: {fileID: 5245491127989480121} + - component: {fileID: 7254047075221496626} + - component: {fileID: 5182704636738128575} + - component: {fileID: 5220658550450318085} + - component: {fileID: 4264677542023120457} + - component: {fileID: 4715950961444674817} + - component: {fileID: 5653209015090846528} m_Layer: 8 m_Name: Player m_TagString: Defender @@ -110,37 +27,37 @@ 
GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4528203470625763689 +--- !u!4 &5245491127989480103 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 28.21, y: 14.12, z: 48.395} + m_GameObject: {fileID: 5245491127989480125} + m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} + m_LocalPosition: {x: 28.21, y: 10.9, z: 46.67} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4528203471293941515} - - {fileID: 4528203471164033737} + - {fileID: 5245491129196666053} + - {fileID: 5245491129603592455} m_Father: {fileID: 0} m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!33 &4528203470625763688 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!33 &5245491127989480102 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 &-4942972567661207728 +--- !u!23 &5583297852527723678 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -173,13 +90,13 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!136 &4528203470625763702 +--- !u!136 &5245491127989480120 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 @@ -187,13 +104,13 @@ CapsuleCollider: m_Height: 2 m_Direction: 1 m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &4528203470625763690 +--- !u!136 &5245491127989480100 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 @@ -201,13 +118,13 @@ CapsuleCollider: m_Height: 1.3 m_Direction: 1 m_Center: {x: 0, y: -0.35, z: 0} ---- !u!136 &4528203470625763703 +--- !u!136 &5245491127989480121 CapsuleCollider: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 @@ -215,119 +132,16 @@ CapsuleCollider: m_Height: 0.8 m_Direction: 1 m_Center: {x: 0, y: -0.6, z: 0} ---- !u!143 &4528203470625763701 -CharacterController: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Height: 2 - m_Radius: 0.5 - m_SlopeLimit: 45 - m_StepOffset: 0.3 - m_SkinWidth: 0.08 - m_MinMoveDistance: 0.001 - m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &4528203470625763700 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: 
{fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 0 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} - m_Name: - m_EditorClassIdentifier: - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 4528203471293941515} - feetTransform: {fileID: 4528203471164033737} - playerSettings: - ViewXSensetivity: 20 - ViewYSensetivity: 20 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 10 - RunningStrafeSpeed: 6 - WalkingForwardSpeed: 4 - WalkingBackwardSpeed: 2 - WalkingStrafeSpeed: 3 - JumpingHeight: 6 - JumpingFalloff: 1 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 55 - gravityAmount: 0.05 - gravityMin: -3 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0.2 - playerStandStance: - CameraHeight: 0.7 - StanceCollider: {fileID: 4528203470625763702} - playerCrouchStance: - CameraHeight: 0.3 - StanceCollider: {fileID: 4528203470625763690} - playerProneStance: - CameraHeight: -0.58 - StanceCollider: {fileID: 4528203470625763703} - currentWeapon: {fileID: 0} ---- !u!114 &1061105263471521090 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} - m_Name: - m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 10 - moveController: {fileID: 1809549200} ---- !u!114 &1809549200 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} - m_Name: - m_EditorClassIdentifier: - mapManager: {fileID: 0} - navMeshAgent: {fileID: 1809549201} ---- !u!195 &1809549201 +--- !u!195 &7254047075221496626 NavMeshAgent: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} m_Enabled: 1 m_AgentTypeID: 0 - m_Radius: 0.5 + m_Radius: 0.5000001 m_Speed: 3.5 m_Acceleration: 8 avoidancePriority: 50 @@ -340,20 +154,49 @@ NavMeshAgent: m_BaseOffset: 1 m_WalkableMask: 4294967295 m_ObstacleAvoidanceType: 4 ---- !u!114 &1809549212 +--- !u!114 &5182704636738128575 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203470625763699} + m_GameObject: {fileID: 5245491127989480125} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} + m_Name: + m_EditorClassIdentifier: + navMeshAgent: {fileID: 0} + flag: {fileID: 0} +--- !u!114 &5220658550450318085 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491127989480125} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 
dd8012d5925524537b27131fef517017, type: 3} + m_Name: + m_EditorClassIdentifier: + m_SensorName: BufferSensor + m_ObservableSize: 0 + m_MaxNumObservables: 0 +--- !u!114 &4264677542023120457 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491127989480125} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} m_Name: m_EditorClassIdentifier: m_BrainParameters: - VectorObservationSize: 4 + VectorObservationSize: 1 NumStackedVectorObservations: 1 m_ActionSpec: m_NumContinuousActions: 0 @@ -363,14 +206,44 @@ MonoBehaviour: VectorActionSpaceType: 0 hasUpgradedBrainParametersWithActionSpec: 1 m_Model: {fileID: 0} - m_InferenceDevice: 3 - m_BehaviorType: 1 - m_BehaviorName: Defender Behaviour + m_InferenceDevice: 0 + m_BehaviorType: 0 + m_BehaviorName: My Behavior TeamId: 0 - m_UseChildSensors: 0 + m_UseChildSensors: 1 m_UseChildActuators: 1 - m_ObservableAttributeHandling: 2 ---- !u!1 &4528203471164033736 + m_ObservableAttributeHandling: 0 +--- !u!114 &4715950961444674817 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491127989480125} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} + m_Name: + m_EditorClassIdentifier: + agentParameters: + maxStep: 0 + hasUpgradedFromAgentParameters: 1 + MaxStep: 0 +--- !u!114 &5653209015090846528 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491127989480125} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} + m_Name: + m_EditorClassIdentifier: + navMeshAgent: {fileID: 7254047075221496626} + flag: {fileID: 0} +--- !u!1 &5245491128202443531 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -378,29 +251,82 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 4528203471164033737} + - component: {fileID: 5245491128202443574} + - component: {fileID: 5245491128202443529} + - component: {fileID: 5245491128202443528} m_Layer: 8 - m_Name: FeetTransform + m_Name: Camera m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4528203471164033737 +--- !u!4 &5245491128202443574 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203471164033736} + m_GameObject: {fileID: 5245491128202443531} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -1, z: 0} + m_LocalPosition: {x: 0, y: 0.388, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] - m_Father: {fileID: 4528203470625763689} - m_RootOrder: 1 + m_Father: {fileID: 5245491129196666053} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &4528203471293941514 +--- !u!20 &5245491128202443529 +Camera: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491128202443531} + m_Enabled: 1 + serializedVersion: 2 + m_ClearFlags: 1 + m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} + 
m_projectionMatrixMode: 1 + m_GateFitMode: 2 + m_FOVAxisMode: 0 + m_SensorSize: {x: 36, y: 24} + m_LensShift: {x: 0, y: 0} + m_FocalLength: 50 + m_NormalizedViewPortRect: + serializedVersion: 2 + x: 0 + y: 0 + width: 1 + height: 1 + near clip plane: 0.1 + far clip plane: 1000 + field of view: 60 + orthographic: 0 + orthographic size: 5 + m_Depth: 0 + m_CullingMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingPath: -1 + m_TargetTexture: {fileID: 0} + m_TargetDisplay: 0 + m_TargetEye: 3 + m_HDR: 1 + m_AllowMSAA: 1 + m_AllowDynamicResolution: 0 + m_ForceIntoRT: 0 + m_OcclusionCulling: 1 + m_StereoConvergence: 10 + m_StereoSeparation: 0.022 +--- !u!81 &5245491128202443528 +AudioListener: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491128202443531} + m_Enabled: 1 +--- !u!1 &5245491129196666052 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -408,7 +334,7 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 4528203471293941515} + - component: {fileID: 5245491129196666053} m_Layer: 8 m_Name: CameraHolder m_TagString: Untagged @@ -416,18 +342,194 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4528203471293941515 +--- !u!4 &5245491129196666053 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4528203471293941514} + m_GameObject: {fileID: 5245491129196666052} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0.7, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4528203470433968376} - m_Father: {fileID: 4528203470625763689} + - {fileID: 5245491128202443574} + - {fileID: 8510909888689775087} + m_Father: {fileID: 5245491127989480103} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &5245491129603592454 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 5245491129603592455} + m_Layer: 8 + m_Name: FeetTransform + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &5245491129603592455 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5245491129603592454} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: -1, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 5245491127989480103} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &8510909888198732725 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 8510909888198732726} + - component: {fileID: 8510909888198732713} + - component: {fileID: 8510909888198732712} + - component: {fileID: 8510909888198732727} + m_Layer: 8 + m_Name: WeaponModel + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &8510909888198732726 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: 
{fileID: 8510909888198732725} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 8510909888689775087} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!33 &8510909888198732713 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!23 &8510909888198732712 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!65 &8510909888198732727 +BoxCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888198732725} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!1 &8510909888689775085 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 8510909888689775087} + - component: {fileID: 8510909888689775086} + m_Layer: 8 + m_Name: Weapon + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &8510909888689775087 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888689775085} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0.332, y: 0.038, z: 0.394} + m_LocalScale: {x: 0.16226998, y: 0.1581135, z: 1} + m_Children: + - {fileID: 8510909888198732726} + m_Father: {fileID: 5245491129196666053} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &8510909888689775086 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 8510909888689775085} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 088bf904d7c90a44dbb35c1d47c2692e, type: 3} + m_Name: + m_EditorClassIdentifier: + settings: + SwayAmount: 4 + SwayYInverted: 0 + SwayXInverted: 0 + SwaySmoothing: 0.1 + SwayResetSmoothing: 0.1 + SwayClampX: 8 + SwayClampY: 8 diff --git a/Assets/Prefabs/Player.prefab.meta b/Assets/Prefabs/Player.prefab.meta old mode 100755 new mode 100644 
index 9180d60..43cb0ce --- a/Assets/Prefabs/Player.prefab.meta +++ b/Assets/Prefabs/Player.prefab.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 99bb17a23a489624baeaf337f91a4f84 +guid: a7480b77908b042d8adcdd84e8c2c15e PrefabImporter: externalObjects: {} userData: diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta b/Assets/Resources.meta similarity index 77% rename from Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta rename to Assets/Resources.meta index 583669d..ce0f261 100644 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.meta +++ b/Assets/Resources.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 63fefa6166d5e6c4b96e83cc243f0a18 +guid: 0399b431a9578c84498be1b74b85e3b3 folderAsset: yes DefaultImporter: externalObjects: {} diff --git a/Assets/Resources/BillingMode.json b/Assets/Resources/BillingMode.json new file mode 100644 index 0000000..6f4bfb7 --- /dev/null +++ b/Assets/Resources/BillingMode.json @@ -0,0 +1 @@ +{"androidStore":"GooglePlay"} \ No newline at end of file diff --git a/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta b/Assets/Resources/BillingMode.json.meta old mode 100755 new mode 100644 similarity index 75% rename from Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta rename to Assets/Resources/BillingMode.json.meta index db7171f..92a449b --- a/Assets/ML-Agents/Timers/dont touch me plz_timers.json.meta +++ b/Assets/Resources/BillingMode.json.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 249f8a9af2b0bc041a08a0009a6fdf44 +guid: a9107cc6399d82a40836e1a1be4e5030 TextScriptImporter: externalObjects: {} userData: diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity old mode 100755 new mode 100644 index 539c07c..1b0de26 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity +++ b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever.unity @@ -259,7 +259,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &140697607 MeshRenderer: m_ObjectHideFlags: 0 @@ -352,7 +355,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &293522541 MeshRenderer: m_ObjectHideFlags: 0 @@ -1249,12 +1255,6 @@ Transform: m_Father: {fileID: 671439045} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} ---- !u!4 &868386701 stripped -Transform: - m_CorrespondingSourceObject: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - m_PrefabInstance: {fileID: 1809549197} - m_PrefabAsset: {fileID: 0} --- !u!1 &884498019 GameObject: m_ObjectHideFlags: 0 @@ -1288,80 +1288,6 @@ Transform: m_Father: {fileID: 782729761} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &1061293905 -PrefabInstance: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 868386701} - m_Modifications: - - target: {fileID: 2836004473841745626, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_Controller - value: - objectReference: {fileID: 9100000, guid: 95a44e56d04c7d248ba723eda9611c51, type: 2} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_RootOrder - value: 2 - objectReference: {fileID: 0} - - target: {fileID: 
2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalPosition.y - value: -1 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalEulerAnglesHint.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalEulerAnglesHint.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591197023, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_LocalEulerAnglesHint.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 2836004475591695743, guid: f432554f564e69242897607d34218939, - type: 3} - propertyPath: m_Name - value: DragonFucker - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_SourcePrefab: {fileID: 100100000, guid: f432554f564e69242897607d34218939, type: 3} --- !u!1 &1116745543 GameObject: m_ObjectHideFlags: 0 @@ -1393,7 +1319,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1116745545 MeshRenderer: m_ObjectHideFlags: 0 @@ -1554,7 +1483,7 @@ Mesh: m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_Name: pb_Mesh17416 + m_Name: pb_Mesh16900 serializedVersion: 10 m_SubMeshes: - serializedVersion: 2 @@ -1840,7 +1769,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1345085343 MeshRenderer: m_ObjectHideFlags: 0 @@ -1933,7 +1865,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1488699524 MeshRenderer: m_ObjectHideFlags: 0 @@ -2152,7 +2087,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &1663305224 MeshRenderer: m_ObjectHideFlags: 0 @@ -2200,105 +2138,6 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1663305221} m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1001 
&1809549197 -PrefabInstance: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: -4942972567661207728, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_Enabled - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_RootOrder - value: 6 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalPosition.x - value: 42.23 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalPosition.y - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalPosition.z - value: -15.91 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalEulerAnglesHint.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalEulerAnglesHint.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763689, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalEulerAnglesHint.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763699, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_Name - value: Player - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763700, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: playerStance - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763700, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: playerCrouchStance.CameraHeight - value: 0.3 - objectReference: {fileID: 0} - - target: {fileID: 4528203470625763703, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_Enabled - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4528203471293941515, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4528203471293941515, guid: 99bb17a23a489624baeaf337f91a4f84, - type: 3} - propertyPath: m_LocalPosition.z - value: -2.417 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_SourcePrefab: {fileID: 100100000, guid: 99bb17a23a489624baeaf337f91a4f84, type: 3} --- !u!1 &1858987083 GameObject: m_ObjectHideFlags: 0 @@ -2311,8 +2150,10 @@ GameObject: - component: {fileID: 1858987086} - component: {fileID: 1858987084} - component: {fileID: 1858987089} + - component: {fileID: 1858987090} - 
component: {fileID: 1858987088} - component: {fileID: 1858987085} + - component: {fileID: 1858987091} m_Layer: 0 m_Name: Game m_TagString: Untagged @@ -2332,7 +2173,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 81d1d84442a0ba441976abd6fdd22788, type: 3} m_Name: m_EditorClassIdentifier: - CurrentTime: 0 --- !u!114 &1858987085 MonoBehaviour: m_ObjectHideFlags: 0 @@ -2406,6 +2246,38 @@ MonoBehaviour: - {fileID: 1345085342} - {fileID: 1116745544} - {fileID: 2004854094} +--- !u!114 &1858987090 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1858987083} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: b0835d77f48130e4f81c678f710bf87c, type: 3} + m_Name: + m_EditorClassIdentifier: + spawnPointsForDefendersTeam: + - {fileID: 140697606} + spawnPointsForAttackersTeam: + - {fileID: 2004854094} + AIPrefab: {fileID: 6171680323948707524, guid: b016874eb34cc084aa4359f0bbec50e1, + type: 3} + PlayerPrefab: {fileID: 5245491127989480125, guid: 80f6c1c85e5daed4c96c70205ed5503d, + type: 3} +--- !u!114 &1858987091 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1858987083} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: bf3fe86787bfb0c4b8751fe495148ede, type: 3} + m_Name: + m_EditorClassIdentifier: --- !u!1 &1871257865 GameObject: m_ObjectHideFlags: 0 @@ -2659,7 +2531,10 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} m_Name: m_EditorClassIdentifier: - PointId: 1 + navType: 1 + PointId: 0 + DeathAttr: 0 + EnemiesSeen: [] --- !u!23 &2004854095 MeshRenderer: m_ObjectHideFlags: 0 @@ -2877,88 +2752,6 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2114154251} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!114 &5078004101906046130 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} - m_Name: - m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 100 - moveController: {fileID: 5242608118223468128} ---- !u!114 &5242608118223468128 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: d3ebcf807a37f344998fd648dfc9376d, type: 3} - m_Name: - m_EditorClassIdentifier: - navMeshAgent: {fileID: 5242608118223468129} ---- !u!195 &5242608118223468129 -NavMeshAgent: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_AgentTypeID: 0 - m_Radius: 0.5 - m_Speed: 3.5 - m_Acceleration: 8 - avoidancePriority: 50 - m_AngularSpeed: 120 - m_StoppingDistance: 0 - m_AutoTraverseOffMeshLink: 1 - m_AutoBraking: 1 - m_AutoRepath: 1 - m_Height: 2 - m_BaseOffset: 1 - m_WalkableMask: 4294967295 - m_ObstacleAvoidanceType: 4 ---- !u!114 &5242608118223468140 -MonoBehaviour: - m_ObjectHideFlags: 0 - 
m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} - m_Name: - m_EditorClassIdentifier: - m_BrainParameters: - VectorObservationSize: 4 - NumStackedVectorObservations: 1 - m_ActionSpec: - m_NumContinuousActions: 0 - BranchSizes: 01000000 - VectorActionSize: 01000000 - VectorActionDescriptions: [] - VectorActionSpaceType: 0 - hasUpgradedBrainParametersWithActionSpec: 1 - m_Model: {fileID: 0} - m_InferenceDevice: 0 - m_BehaviorType: 1 - m_BehaviorName: NPC - TeamId: 0 - m_UseChildSensors: 1 - m_UseChildActuators: 1 - m_ObservableAttributeHandling: 0 --- !u!1001 &6818223692685937217 PrefabInstance: m_ObjectHideFlags: 0 @@ -3033,366 +2826,3 @@ PrefabInstance: objectReference: {fileID: 0} m_RemovedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: 1685c1d9ce4ab174f95c646b1826010b, type: 3} ---- !u!23 &8333476723876163232 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!1 &8509012040201336570 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012040201336571} - m_Layer: 8 - m_Name: CameraHolder - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8509012040201336571 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040201336570} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.7, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 8509012041069629704} - m_Father: {fileID: 8509012040873181337} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8509012040340093752 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012040340093753} - m_Layer: 8 - m_Name: FeetTransform - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8509012040340093753 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040340093752} - 
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -1, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 8509012040873181337} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8509012040873181315 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012040873181337} - - component: {fileID: 8509012040873181336} - - component: {fileID: 8333476723876163232} - - component: {fileID: 8509012040873181318} - - component: {fileID: 8509012040873181338} - - component: {fileID: 8509012040873181319} - - component: {fileID: 8509012040873181317} - - component: {fileID: 8509012040873181316} - - component: {fileID: 5078004101906046130} - - component: {fileID: 5242608118223468128} - - component: {fileID: 5242608118223468129} - - component: {fileID: 5242608118223468140} - - component: {fileID: 8509012040873181320} - m_Layer: 8 - m_Name: 'Player ' - m_TagString: Defender - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &8509012040873181316 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 0 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} - m_Name: - m_EditorClassIdentifier: - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 8509012040201336571} - feetTransform: {fileID: 8509012040340093753} - playerSettings: - ViewXSensetivity: 20 - ViewYSensetivity: 20 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 10 - RunningStrafeSpeed: 6 - WalkingForwardSpeed: 4 - WalkingBackwardSpeed: 2 - WalkingStrafeSpeed: 3 - JumpingHeight: 6 - JumpingFalloff: 1 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 55 - gravityAmount: 0.05 - gravityMin: -3 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0.2 - playerStandStance: - CameraHeight: 0.7 - StanceCollider: {fileID: 8509012040873181318} - playerCrouchStance: - CameraHeight: 0.3 - StanceCollider: {fileID: 8509012040873181338} - playerProneStance: - CameraHeight: -0.58 - StanceCollider: {fileID: 8509012040873181319} - currentWeapon: {fileID: 0} ---- !u!143 &8509012040873181317 -CharacterController: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Height: 2 - m_Radius: 0.5 - m_SlopeLimit: 45 - m_StepOffset: 0.3 - m_SkinWidth: 0.08 - m_MinMoveDistance: 0.001 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &8509012040873181318 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 2 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &8509012040873181319 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - 
m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.3 - m_Height: 0.8 - m_Direction: 1 - m_Center: {x: 0, y: -0.6, z: 0} ---- !u!114 &8509012040873181320 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 3a5c9d521e5ef4759a8246a07d52221e, type: 3} - m_Name: - m_EditorClassIdentifier: - DecisionPeriod: 1 - TakeActionsBetweenDecisions: 0 ---- !u!33 &8509012040873181336 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!4 &8509012040873181337 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_LocalRotation: {x: -0, y: 0.6820348, z: -0, w: 0.7313197} - m_LocalPosition: {x: 40.54, y: 1, z: -15.91} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 8509012040201336571} - - {fileID: 8509012040340093753} - m_Father: {fileID: 0} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 0, y: 86.006004, z: 0} ---- !u!136 &8509012040873181338 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012040873181315} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 1.3 - m_Direction: 1 - m_Center: {x: 0, y: -0.35, z: 0} ---- !u!4 &8509012041069629704 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012041069629749} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.734, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 8509012040201336571} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8509012041069629749 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8509012041069629704} - - component: {fileID: 8509012041069629751} - - component: {fileID: 8509012041069629750} - m_Layer: 8 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!81 &8509012041069629750 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012041069629749} - m_Enabled: 1 ---- !u!20 &8509012041069629751 -Camera: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8509012041069629749} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - 
m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset deleted file mode 100644 index 141ab0e..0000000 Binary files a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset and /dev/null differ diff --git a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta b/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta deleted file mode 100644 index 10e1563..0000000 --- a/Assets/Scenes/Greatest_map_ever/Greatest_map_ever/NavMesh.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 305bb221606d7a748acca94156e2d347 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 23800000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/Assets/Scenes/Greatest_map_ever/NavMesh.asset b/Assets/Scenes/Greatest_map_ever/NavMesh.asset index 71de95d..d98232a 100755 Binary files a/Assets/Scenes/Greatest_map_ever/NavMesh.asset and b/Assets/Scenes/Greatest_map_ever/NavMesh.asset differ diff --git a/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta b/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta index b38f8d9..10e1563 100755 --- a/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta +++ b/Assets/Scenes/Greatest_map_ever/NavMesh.asset.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 09beff657ef5d1c4eba194a01e121c1a +guid: 305bb221606d7a748acca94156e2d347 NativeFormatImporter: externalObjects: {} mainObjectFileID: 23800000 diff --git a/Assets/Scenes/Primitive scene/Primitive scene.unity b/Assets/Scenes/Primitive scene/Primitive scene.unity index 81e1d3e..0dc89cc 100644 --- a/Assets/Scenes/Primitive scene/Primitive scene.unity +++ b/Assets/Scenes/Primitive scene/Primitive scene.unity @@ -121,6 +121,190 @@ NavMeshSettings: debug: m_Flags: 0 m_NavMeshData: {fileID: 23800000, guid: 0a8e6bd940a44a94a85aa57b586f2c2f, type: 2} +--- !u!1 &2859775 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2859776} + - component: {fileID: 2859779} + - component: {fileID: 2859778} + - component: {fileID: 2859777} + m_Layer: 0 + m_Name: navpointM (7) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2859776 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2859775} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 41.72, y: 1.5, z: 47.82} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 38 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &2859777 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2859775} + m_Enabled: 1 + 
m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2859778 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2859775} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2859779 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2859775} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &5230093 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 5230094} + - component: {fileID: 5230097} + - component: {fileID: 5230096} + - component: {fileID: 5230095} + m_Layer: 0 + m_Name: navpointM (30) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &5230094 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5230093} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -3.03, y: 1.5, z: 37.66} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 71 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &5230095 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5230093} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &5230096 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5230093} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + 
m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &5230097 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 5230093} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &14486390 GameObject: m_ObjectHideFlags: 0 @@ -214,6 +398,441 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: PointId: 0 +--- !u!1 &17799734 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 17799735} + - component: {fileID: 17799737} + - component: {fileID: 17799736} + m_Layer: 0 + m_Name: navpointC (10) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &17799735 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 17799734} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.62, y: 1.5, z: 36.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 12 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &17799736 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 17799734} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &17799737 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 17799734} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &18626622 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 18626623} + - component: {fileID: 18626626} + - component: {fileID: 18626625} + - component: {fileID: 18626624} + m_Layer: 0 + m_Name: Cube (12) + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 1 + m_StaticEditorFlags: 40 + m_IsActive: 1 +--- !u!4 &18626623 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 18626622} + m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} + m_LocalPosition: {x: -34.18, y: 0.75, z: 54.58} + m_LocalScale: {x: 3, 
y: 1.5, z: 0.3} + m_Children: [] + m_Father: {fileID: 722434249} + m_RootOrder: 14 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!65 &18626624 +BoxCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 18626622} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!23 &18626625 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 18626622} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &18626626 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 18626622} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &34117199 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 34117200} + - component: {fileID: 34117203} + - component: {fileID: 34117202} + - component: {fileID: 34117201} + m_Layer: 0 + m_Name: navpointM (19) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &34117200 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34117199} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.27, y: 1.5, z: 45.75} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 56 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &34117201 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34117199} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &34117202 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34117199} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + 
m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &34117203 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34117199} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &34937690 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 34937691} + - component: {fileID: 34937693} + - component: {fileID: 34937692} + m_Layer: 0 + m_Name: navpointC (23) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &34937691 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34937690} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 28.880003, y: 1.5, z: 54.989} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 25 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &34937692 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34937690} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &34937693 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 34937690} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &40303683 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 40303684} + - component: {fileID: 40303687} + - component: {fileID: 40303686} + - component: {fileID: 40303685} + m_Layer: 0 + m_Name: navpointM (20) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &40303684 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + 
m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 40303683} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 18.69, y: 1.5, z: 45.75} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 57 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &40303685 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 40303683} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &40303686 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 40303683} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &40303687 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 40303683} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &56419058 GameObject: m_ObjectHideFlags: 0 @@ -307,6 +926,85 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: PointId: 0 +--- !u!1 &64372562 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 64372563} + - component: {fileID: 64372565} + - component: {fileID: 64372564} + m_Layer: 0 + m_Name: navpointC (37) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &64372563 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 64372562} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -1.37, y: 1.5, z: 42.84} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 67 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &64372564 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 64372562} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + 
firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &64372565 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 64372562} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &67303196 GameObject: m_ObjectHideFlags: 0 @@ -349,7 +1047,6 @@ GameObject: - component: {fileID: 97520834} - component: {fileID: 97520837} - component: {fileID: 97520836} - - component: {fileID: 97520835} m_Layer: 0 m_Name: navpointC m_TagString: Point @@ -365,25 +1062,12 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 97520833} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalPosition: {x: 12.59, y: 1.5, z: 20.58} m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} m_Children: [] m_Father: {fileID: 1269785725} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &97520835 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 97520833} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} - m_Name: - m_EditorClassIdentifier: - PointId: 1 --- !u!23 &97520836 MeshRenderer: m_ObjectHideFlags: 0 @@ -391,7 +1075,7 @@ MeshRenderer: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 97520833} - m_Enabled: 0 + m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 m_DynamicOccludee: 1 @@ -462,7 +1146,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 15} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 31 + m_RootOrder: 30 m_LocalEulerAnglesHint: {x: 0, y: 45, z: 0} --- !u!65 &103297874 BoxCollider: @@ -524,6 +1208,269 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 103297872} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &110974655 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 110974656} + - component: {fileID: 110974658} + - component: {fileID: 110974657} + m_Layer: 0 + m_Name: navpointC (5) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &110974656 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 110974655} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.53, y: 1.5, z: 25.39} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 7 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &110974657 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 110974655} + m_Enabled: 1 + 
m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &110974658 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 110974655} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &134763547 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 134763548} + - component: {fileID: 134763551} + - component: {fileID: 134763550} + - component: {fileID: 134763549} + m_Layer: 0 + m_Name: navpointM (22) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &134763548 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 134763547} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -4.31, y: 1.5, z: 25.22} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 59 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &134763549 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 134763547} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &134763550 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 134763547} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &134763551 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + 
m_GameObject: {fileID: 134763547} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &159684852 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 159684853} + - component: {fileID: 159684856} + - component: {fileID: 159684855} + - component: {fileID: 159684854} + m_Layer: 0 + m_Name: navpointM (36) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &159684853 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 159684852} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 66.96, y: 1.5, z: 29.27} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 79 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &159684854 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 159684852} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &159684855 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 159684852} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &159684856 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 159684852} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &201972625 GameObject: m_ObjectHideFlags: 0 @@ -710,6 +1657,361 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 221415232} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &256981950 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 256981951} + - component: {fileID: 256981954} + - component: {fileID: 256981953} + - component: {fileID: 256981952} + m_Layer: 0 + m_Name: navpointM (14) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &256981951 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} 
+ m_GameObject: {fileID: 256981950} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 47.06, y: 1.5, z: 27.17} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 47 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &256981952 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 256981950} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &256981953 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 256981950} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &256981954 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 256981950} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &272332852 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 272332853} + - component: {fileID: 272332856} + - component: {fileID: 272332855} + - component: {fileID: 272332854} + m_Layer: 0 + m_Name: navpointM (38) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &272332853 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 272332852} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 44.15, y: 1.5, z: -0.95} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 81 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &272332854 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 272332852} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &272332855 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 272332852} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 
1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &272332856 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 272332852} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &278017909 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 278017910} + - component: {fileID: 278017912} + - component: {fileID: 278017911} + m_Layer: 0 + m_Name: navpointC (25) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &278017910 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 278017909} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 31.970003, y: 1.5, z: 56.8} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 27 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &278017911 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 278017909} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &278017912 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 278017909} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &296441341 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 296441342} + - component: {fileID: 296441345} + - component: {fileID: 296441344} + - component: {fileID: 296441343} + m_Layer: 0 + m_Name: navpointM (9) + m_TagString: 
Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &296441342 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 296441341} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 56.53, y: 1.5, z: 18.91} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 40 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &296441343 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 296441341} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &296441344 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 296441341} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &296441345 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 296441341} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &306963312 GameObject: m_ObjectHideFlags: 0 @@ -803,6 +2105,282 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 306963312} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &315617675 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 315617676} + - component: {fileID: 315617679} + - component: {fileID: 315617678} + - component: {fileID: 315617677} + m_Layer: 0 + m_Name: navpointM (48) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &315617676 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 315617675} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 12.63, y: 1.5, z: 0.38} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 91 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &315617677 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 315617675} + 
m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &315617678 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 315617675} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &315617679 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 315617675} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &319771868 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 319771869} + - component: {fileID: 319771872} + - component: {fileID: 319771871} + - component: {fileID: 319771870} + m_Layer: 0 + m_Name: navpointM (18) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &319771869 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 319771868} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 50, y: 1.5, z: 52.45} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 51 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &319771870 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 319771868} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &319771871 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 319771868} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + 
m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &319771872 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 319771868} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &329695249 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 329695250} + - component: {fileID: 329695253} + - component: {fileID: 329695252} + - component: {fileID: 329695251} + m_Layer: 0 + m_Name: navpointM (46) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &329695250 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 329695249} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 16.78, y: 1.5, z: -0.14000225} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 89 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &329695251 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 329695249} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &329695252 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 329695249} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &329695253 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 329695249} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &333222422 GameObject: m_ObjectHideFlags: 0 @@ -834,7 +2412,7 @@ Transform: m_LocalScale: {x: 1.26, y: 3, z: 23.69} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 40 + m_RootOrder: 39 m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} --- !u!65 &333222424 BoxCollider: @@ -896,6 +2474,177 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 333222422} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &360417378 +GameObject: + 
m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 360417379} + - component: {fileID: 360417382} + - component: {fileID: 360417381} + - component: {fileID: 360417380} + m_Layer: 0 + m_Name: navpointM (2) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &360417379 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 360417378} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 27.39, y: 1.5, z: 52.33} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 33 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &360417380 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 360417378} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &360417381 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 360417378} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &360417382 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 360417378} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &362961675 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 362961676} + - component: {fileID: 362961678} + - component: {fileID: 362961677} + m_Layer: 0 + m_Name: navpointC (8) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &362961676 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 362961675} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 24.06, y: 1.5, z: 34.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 10 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &362961677 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + 
m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 362961675} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &362961678 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 362961675} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &366909975 GameObject: m_ObjectHideFlags: 0 @@ -922,11 +2671,102 @@ Transform: m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: -90, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 2015542237} + m_Children: [] m_Father: {fileID: 541834960} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &443593996 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 443593997} + - component: {fileID: 443594000} + - component: {fileID: 443593999} + - component: {fileID: 443593998} + m_Layer: 0 + m_Name: navpointM (12) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &443593997 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 443593996} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 41.36, y: 1.5, z: 32.95} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 43 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &443593998 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 443593996} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &443593999 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 443593996} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + 
m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &443594000 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 443593996} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &444158371 GameObject: m_ObjectHideFlags: 0 @@ -1113,6 +2953,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 454615363} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &464994702 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 464994703} + - component: {fileID: 464994706} + - component: {fileID: 464994705} + - component: {fileID: 464994704} + m_Layer: 0 + m_Name: navpointM (40) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &464994703 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 464994702} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 44.15, y: 1.5, z: -7.74} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 83 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &464994704 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 464994702} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &464994705 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 464994702} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &464994706 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 464994702} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &471528217 GameObject: m_ObjectHideFlags: 0 @@ -1392,6 +3324,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 490106836} m_Mesh: {fileID: 10206, guid: 
0000000000000000e000000000000000, type: 0} +--- !u!1 &496659013 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 496659014} + - component: {fileID: 496659017} + - component: {fileID: 496659016} + - component: {fileID: 496659015} + m_Layer: 0 + m_Name: navpointM (39) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &496659014 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 496659013} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 44.22, y: 1.5, z: 7.42} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 82 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &496659015 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 496659013} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &496659016 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 496659013} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &496659017 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 496659013} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &508478143 GameObject: m_ObjectHideFlags: 0 @@ -1578,6 +3602,85 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 510594852} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &514373050 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 514373051} + - component: {fileID: 514373053} + - component: {fileID: 514373052} + m_Layer: 0 + m_Name: navpointC (11) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &514373051 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 514373050} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: 
{x: 23.593, y: 1.5, z: 36.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 13 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &514373052 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 514373050} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &514373053 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 514373050} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &534459842 GameObject: m_ObjectHideFlags: 0 @@ -1702,6 +3805,177 @@ Transform: m_Father: {fileID: 760001244} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &535309527 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 535309528} + - component: {fileID: 535309530} + - component: {fileID: 535309529} + m_Layer: 0 + m_Name: navpointC (41) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &535309528 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535309527} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 7.3099995, y: 1.5, z: 59.07} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 76 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &535309529 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535309527} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + 
m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &535309530 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535309527} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &535540088 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 535540089} + - component: {fileID: 535540092} + - component: {fileID: 535540091} + - component: {fileID: 535540090} + m_Layer: 0 + m_Name: navpointM (25) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &535540089 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535540088} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -2.87, y: 1.5, z: 21.12} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 62 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &535540090 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535540088} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &535540091 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535540088} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &535540092 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 535540088} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &541834959 GameObject: m_ObjectHideFlags: 0 @@ -1858,7 +4132,7 @@ Transform: m_LocalScale: {x: 5, y: 3, z: 5} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 39 + m_RootOrder: 38 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!65 &557974904 BoxCollider: @@ -1920,6 +4194,269 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 557974902} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &566877376 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - 
component: {fileID: 566877377} + - component: {fileID: 566877380} + - component: {fileID: 566877379} + - component: {fileID: 566877378} + m_Layer: 0 + m_Name: navpointM (29) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &566877377 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 566877376} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 1.36, y: 1.5, z: 41.71} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 70 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &566877378 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 566877376} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &566877379 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 566877376} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &566877380 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 566877376} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &572539932 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 572539933} + - component: {fileID: 572539936} + - component: {fileID: 572539935} + - component: {fileID: 572539934} + m_Layer: 0 + m_Name: navpointM (31) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &572539933 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 572539932} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -7.18, y: 1.5, z: 41.71} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 72 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &572539934 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 572539932} + m_Enabled: 1 + m_EditorHideFlags: 0 
+ m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &572539935 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 572539932} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &572539936 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 572539932} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &586889191 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 586889192} + - component: {fileID: 586889194} + - component: {fileID: 586889193} + m_Layer: 0 + m_Name: navpointC (21) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &586889192 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 586889191} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 26.53, y: 1.5, z: 56.98} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 23 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &586889193 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 586889191} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &586889194 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 586889191} + m_Mesh: {fileID: 10206, 
guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &596558265 GameObject: m_ObjectHideFlags: 0 @@ -2478,6 +5015,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 654008964} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &704829436 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 704829437} + - component: {fileID: 704829440} + - component: {fileID: 704829439} + - component: {fileID: 704829438} + m_Layer: 0 + m_Name: navpointM (44) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &704829437 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 704829436} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 40.62, y: 1.5, z: -4.319999} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 87 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &704829438 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 704829436} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &704829439 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 704829436} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &704829440 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 704829436} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &710865455 GameObject: m_ObjectHideFlags: 0 @@ -2612,9 +5241,102 @@ Transform: - {fileID: 508478144} - {fileID: 2141093009} - {fileID: 1732971589} + - {fileID: 18626623} m_Father: {fileID: 2059424040} m_RootOrder: 13 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &723290358 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 723290359} + - component: {fileID: 723290362} + - component: {fileID: 723290361} + - component: {fileID: 723290360} + m_Layer: 0 + m_Name: navpointM (41) + m_TagString: Point + m_Icon: {fileID: 0} + 
m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &723290359 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 723290358} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 47.46, y: 1.5, z: 4.23} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 84 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &723290360 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 723290358} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &723290361 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 723290358} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &723290362 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 723290358} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &746474113 GameObject: m_ObjectHideFlags: 0 @@ -2801,6 +5523,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 752187221} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &753365123 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 753365124} + - component: {fileID: 753365127} + - component: {fileID: 753365126} + - component: {fileID: 753365125} + m_Layer: 0 + m_Name: navpointM (13) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &753365124 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 753365123} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 42.8, y: 1.5, z: 34.42} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 46 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &753365125 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 753365123} + m_Enabled: 1 + m_EditorHideFlags: 
0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &753365126 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 753365123} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &753365127 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 753365123} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &760001243 GameObject: m_ObjectHideFlags: 0 @@ -2836,6 +5650,190 @@ Transform: m_Father: {fileID: 0} m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &766882409 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 766882410} + - component: {fileID: 766882413} + - component: {fileID: 766882412} + - component: {fileID: 766882411} + m_Layer: 0 + m_Name: navpointM (11) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &766882410 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 766882409} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 48.64, y: 1.5, z: 28.67} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 42 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &766882411 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 766882409} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &766882412 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 766882409} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + 
m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &766882413 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 766882409} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &771082156 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 771082157} + - component: {fileID: 771082160} + - component: {fileID: 771082159} + - component: {fileID: 771082158} + m_Layer: 0 + m_Name: navpointM (24) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &771082157 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 771082156} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0.68, y: 1.5, z: 18.82} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 61 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &771082158 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 771082156} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &771082159 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 771082156} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &771082160 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 771082156} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &772385942 GameObject: m_ObjectHideFlags: 0 @@ -2929,6 +5927,98 @@ Transform: m_Father: {fileID: 947663786} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &778000327 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + 
serializedVersion: 6 + m_Component: + - component: {fileID: 778000328} + - component: {fileID: 778000331} + - component: {fileID: 778000330} + - component: {fileID: 778000329} + m_Layer: 0 + m_Name: navpointM (3) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &778000328 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 778000327} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 27.39, y: 1.5, z: 39.31} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 34 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &778000329 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 778000327} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &778000330 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 778000327} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &778000331 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 778000327} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &798394371 GameObject: m_ObjectHideFlags: 0 @@ -3022,6 +6112,190 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 798394371} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &806988801 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 806988802} + - component: {fileID: 806988805} + - component: {fileID: 806988804} + - component: {fileID: 806988803} + m_Layer: 0 + m_Name: navpointM (45) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &806988802 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 806988801} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.170002, y: 1.5, z: 3.909997} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 88 + 
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &806988803 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 806988801} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &806988804 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 806988801} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &806988805 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 806988801} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &811219973 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 811219974} + - component: {fileID: 811219977} + - component: {fileID: 811219976} + - component: {fileID: 811219975} + m_Layer: 0 + m_Name: navpointM (26) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &811219974 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 811219973} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -2.87, y: 1.5, z: 23.65} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 63 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &811219975 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 811219973} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &811219976 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 811219973} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + 
m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &811219977 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 811219973} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &818693966 GameObject: m_ObjectHideFlags: 0 @@ -3115,6 +6389,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 818693966} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &826068032 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 826068033} + - component: {fileID: 826068036} + - component: {fileID: 826068035} + - component: {fileID: 826068034} + m_Layer: 0 + m_Name: navpointM (34) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &826068033 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 826068032} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 77.13, y: 1.5, z: 18.91} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 77 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &826068034 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 826068032} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &826068035 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 826068032} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &826068036 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 826068032} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &843995011 
GameObject: m_ObjectHideFlags: 0 @@ -3394,6 +6760,85 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 866473127} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &872479533 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 872479534} + - component: {fileID: 872479536} + - component: {fileID: 872479535} + m_Layer: 0 + m_Name: navpointC (13) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &872479534 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 872479533} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 28.6, y: 1.5, z: 36.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 15 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &872479535 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 872479533} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &872479536 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 872479533} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &872906529 GameObject: m_ObjectHideFlags: 0 @@ -3487,6 +6932,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 872906529} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &877900778 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 877900779} + - component: {fileID: 877900782} + - component: {fileID: 877900781} + - component: {fileID: 877900780} + m_Layer: 0 + m_Name: navpointM (23) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &877900779 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 877900778} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -1.8999996, y: 1.5, z: 18.82} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 60 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} 
+--- !u!114 &877900780 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 877900778} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &877900781 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 877900778} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &877900782 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 877900778} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &892755339 GameObject: m_ObjectHideFlags: 0 @@ -3518,7 +7055,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 10} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 34 + m_RootOrder: 33 m_LocalEulerAnglesHint: {x: 0, y: 45, z: 0} --- !u!65 &892755341 BoxCollider: @@ -3611,7 +7148,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 8.03} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 42 + m_RootOrder: 41 m_LocalEulerAnglesHint: {x: 0, y: 30, z: 0} --- !u!65 &902084818 BoxCollider: @@ -3673,6 +7210,98 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 902084816} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &911093814 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 911093815} + - component: {fileID: 911093818} + - component: {fileID: 911093817} + - component: {fileID: 911093816} + m_Layer: 0 + m_Name: navpointM (8) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &911093815 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 911093814} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 43.67, y: 1.5, z: 46.08} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 39 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &911093816 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 911093814} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 
6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &911093817 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 911093814} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &911093818 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 911093814} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &912660664 GameObject: m_ObjectHideFlags: 0 @@ -3798,9 +7427,89 @@ Transform: - {fileID: 1902232096} - {fileID: 912660665} - {fileID: 2051059935} + - {fileID: 2044688793} m_Father: {fileID: 535118393} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &965247295 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 965247296} + - component: {fileID: 965247298} + - component: {fileID: 965247297} + m_Layer: 0 + m_Name: navpointC (28) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &965247296 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 965247295} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 23.69, y: 1.5, z: 56.98} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 30 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &965247297 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 965247295} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 
0 +--- !u!33 &965247298 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 965247295} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &974382837 GameObject: m_ObjectHideFlags: 0 @@ -3987,6 +7696,164 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 990292141} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &991290901 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 991290902} + - component: {fileID: 991290904} + - component: {fileID: 991290903} + m_Layer: 0 + m_Name: navpointC (14) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &991290902 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 991290901} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 28.91, y: 1.5, z: 34.779} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 16 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &991290903 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 991290901} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &991290904 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 991290901} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1007052970 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1007052971} + - component: {fileID: 1007052973} + - component: {fileID: 1007052972} + m_Layer: 0 + m_Name: navpointC (27) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1007052971 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1007052970} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 19.12, y: 1.5, z: 54.99} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 29 + 
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1007052972 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1007052970} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1007052973 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1007052970} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1023657118 GameObject: m_ObjectHideFlags: 0 @@ -4080,6 +7947,177 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: PointId: 0 +--- !u!1 &1038713577 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1038713578} + - component: {fileID: 1038713580} + - component: {fileID: 1038713579} + m_Layer: 0 + m_Name: navpointC (7) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1038713578 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1038713577} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 26.11, y: 1.5, z: 34.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 9 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1038713579 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1038713577} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1038713580 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: 
{fileID: 0} + m_GameObject: {fileID: 1038713577} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1054746573 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1054746574} + - component: {fileID: 1054746577} + - component: {fileID: 1054746576} + - component: {fileID: 1054746575} + m_Layer: 0 + m_Name: navpointM (28) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1054746574 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1054746573} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -16.02, y: 1.5, z: 10.89} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 65 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1054746575 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1054746573} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1054746576 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1054746573} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1054746577 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1054746573} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1001 &1065627886 PrefabInstance: m_ObjectHideFlags: 0 @@ -4159,6 +8197,85 @@ PrefabInstance: objectReference: {fileID: 0} m_RemovedComponents: [] m_SourcePrefab: {fileID: 100100000, guid: c98043d57e0190845b790d7b92990bee, type: 3} +--- !u!1 &1078565088 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1078565089} + - component: {fileID: 1078565091} + - component: {fileID: 1078565090} + m_Layer: 0 + m_Name: navpointC (32) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1078565089 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + 
m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1078565088} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 22.67, y: 1.5, z: 27.58} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 52 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1078565090 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1078565088} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1078565091 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1078565088} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1079073054 GameObject: m_ObjectHideFlags: 0 @@ -4190,6 +8307,164 @@ Transform: m_Father: {fileID: 541834960} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1099723428 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1099723429} + - component: {fileID: 1099723431} + - component: {fileID: 1099723430} + m_Layer: 0 + m_Name: navpointC (29) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1099723429 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1099723428} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.54, y: 1.5, z: 56.98} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 31 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1099723430 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1099723428} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + 
m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1099723431 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1099723428} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1100634598 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1100634599} + - component: {fileID: 1100634601} + - component: {fileID: 1100634600} + m_Layer: 0 + m_Name: navpointC (40) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1100634599 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1100634598} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 4.97, y: 1.5, z: 59.07} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 75 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1100634600 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1100634598} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1100634601 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1100634598} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1109367334 GameObject: m_ObjectHideFlags: 0 @@ -4376,6 +8651,85 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1166525586} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1199907099 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1199907100} + - component: {fileID: 1199907102} + - component: {fileID: 1199907101} + m_Layer: 0 + m_Name: navpointC (12) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1199907100 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1199907099} + m_LocalRotation: {x: -0, y: -0, z: 
-0, w: 1} + m_LocalPosition: {x: 26.46, y: 1.5, z: 36.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 14 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1199907101 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1199907099} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1199907102 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1199907099} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1208130436 GameObject: m_ObjectHideFlags: 0 @@ -4483,6 +8837,98 @@ Transform: m_Father: {fileID: 0} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1247656897 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1247656898} + - component: {fileID: 1247656901} + - component: {fileID: 1247656900} + - component: {fileID: 1247656899} + m_Layer: 0 + m_Name: navpointM (43) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1247656898 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1247656897} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 47.46, y: 1.5, z: -4.32} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 86 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1247656899 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1247656897} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1247656900 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1247656897} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + 
m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1247656901 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1247656897} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1269785724 GameObject: m_ObjectHideFlags: 0 @@ -4515,9 +8961,189 @@ Transform: - {fileID: 2049548350} - {fileID: 1623447099} - {fileID: 850186964} + - {fileID: 1657630949} + - {fileID: 1780254486} + - {fileID: 110974656} + - {fileID: 1608580291} + - {fileID: 1038713578} + - {fileID: 362961676} + - {fileID: 1627189259} + - {fileID: 17799735} + - {fileID: 514373051} + - {fileID: 1199907100} + - {fileID: 872479534} + - {fileID: 991290902} + - {fileID: 1744542621} + - {fileID: 2085673840} + - {fileID: 1777274281} + - {fileID: 1747469716} + - {fileID: 1562514094} + - {fileID: 1609126277} + - {fileID: 586889192} + - {fileID: 1722017908} + - {fileID: 34937691} + - {fileID: 1861663019} + - {fileID: 278017910} + - {fileID: 1484134377} + - {fileID: 1007052971} + - {fileID: 965247296} + - {fileID: 1099723429} + - {fileID: 1752479210} + - {fileID: 360417379} + - {fileID: 778000328} + - {fileID: 2021461317} + - {fileID: 2097277516} + - {fileID: 1274673477} + - {fileID: 2859776} + - {fileID: 911093815} + - {fileID: 296441342} + - {fileID: 1958065632} + - {fileID: 766882410} + - {fileID: 443593997} + - {fileID: 1312979744} + - {fileID: 1372427145} + - {fileID: 753365124} + - {fileID: 256981951} + - {fileID: 1719721962} + - {fileID: 2061069356} + - {fileID: 2024428591} + - {fileID: 319771869} + - {fileID: 1078565089} + - {fileID: 1688740656} + - {fileID: 1515079626} + - {fileID: 1507655698} + - {fileID: 34117200} + - {fileID: 40303684} + - {fileID: 1518776455} + - {fileID: 134763548} + - {fileID: 877900779} + - {fileID: 771082157} + - {fileID: 535540089} + - {fileID: 811219974} + - {fileID: 1989253879} + - {fileID: 1054746574} + - {fileID: 1811425350} + - {fileID: 64372563} + - {fileID: 1895971582} + - {fileID: 1411627833} + - {fileID: 566877377} + - {fileID: 5230094} + - {fileID: 572539933} + - {fileID: 2093316296} + - {fileID: 1742013156} + - {fileID: 1100634599} + - {fileID: 535309528} + - {fileID: 826068033} + - {fileID: 1939525610} + - {fileID: 159684853} + - {fileID: 2081371645} + - {fileID: 272332853} + - {fileID: 496659014} + - {fileID: 464994703} + - {fileID: 723290359} + - {fileID: 1880711832} + - {fileID: 1247656898} + - {fileID: 704829437} + - {fileID: 806988802} + - {fileID: 329695250} + - {fileID: 1635600740} + - {fileID: 315617676} + - {fileID: 1999232105} m_Father: {fileID: 760001244} m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1274673476 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1274673477} + - component: {fileID: 1274673480} + - component: {fileID: 1274673479} + - component: {fileID: 1274673478} + m_Layer: 0 + m_Name: navpointM (6) + m_TagString: Point + m_Icon: {fileID: 0} + 
m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1274673477 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1274673476} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 18.69, y: 1.5, z: 45.75} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 37 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1274673478 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1274673476} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1274673479 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1274673476} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1274673480 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1274673476} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1282427542 GameObject: m_ObjectHideFlags: 0 @@ -4611,6 +9237,85 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1282427542} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1312979743 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1312979744} + - component: {fileID: 1312979746} + - component: {fileID: 1312979745} + m_Layer: 0 + m_Name: navpointC (30) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1312979744 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1312979743} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 17.05, y: 1.5, z: 13.04} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 44 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1312979745 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1312979743} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 
1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1312979746 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1312979743} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1339430291 GameObject: m_ObjectHideFlags: 0 @@ -4642,7 +9347,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 10} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 35 + m_RootOrder: 34 m_LocalEulerAnglesHint: {x: 0, y: -45, z: 0} --- !u!65 &1339430293 BoxCollider: @@ -4828,7 +9533,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 15} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 33 + m_RootOrder: 32 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!65 &1356175824 BoxCollider: @@ -4890,6 +9595,85 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1356175822} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1372427144 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1372427145} + - component: {fileID: 1372427147} + - component: {fileID: 1372427146} + m_Layer: 0 + m_Name: navpointC (31) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1372427145 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1372427144} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 17.04, y: 1.5, z: 6.72} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 45 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1372427146 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1372427144} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + 
m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1372427147 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1372427144} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1375408632 GameObject: m_ObjectHideFlags: 0 @@ -5014,7 +9798,7 @@ Transform: m_LocalScale: {x: 5, y: 3, z: 5} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 38 + m_RootOrder: 37 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!65 &1376843260 BoxCollider: @@ -5252,6 +10036,85 @@ Transform: m_Father: {fileID: 0} m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1411627832 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1411627833} + - component: {fileID: 1411627835} + - component: {fileID: 1411627834} + m_Layer: 0 + m_Name: navpointC (39) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1411627833 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1411627832} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -1.37, y: 1.5, z: 48.93} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 69 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1411627834 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1411627832} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1411627835 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1411627832} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1442283937 GameObject: m_ObjectHideFlags: 0 @@ -5376,7 +10239,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 15} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 32 + m_RootOrder: 31 m_LocalEulerAnglesHint: {x: 0, y: 135, z: 0} --- !u!65 &1449846516 BoxCollider: @@ -5717,6 +10580,335 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1476761850} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1484134376 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 
0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1484134377} + - component: {fileID: 1484134379} + - component: {fileID: 1484134378} + m_Layer: 0 + m_Name: navpointC (26) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1484134377 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1484134376} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 33.9, y: 1.5, z: 56.8} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 28 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1484134378 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1484134376} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1484134379 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1484134376} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1507655697 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1507655698} + - component: {fileID: 1507655700} + - component: {fileID: 1507655699} + m_Layer: 0 + m_Name: navpointC (35) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1507655698 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1507655697} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 14.28, y: 1.5, z: 32.85} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 55 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1507655699 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1507655697} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + 
m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1507655700 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1507655697} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1515079625 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1515079626} + - component: {fileID: 1515079628} + - component: {fileID: 1515079627} + m_Layer: 0 + m_Name: navpointC (34) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1515079626 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1515079625} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 11.94, y: 1.5, z: 32.85} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 54 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1515079627 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1515079625} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1515079628 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1515079625} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1518776454 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1518776455} + - component: {fileID: 1518776458} + - component: {fileID: 1518776457} + - component: {fileID: 1518776456} + m_Layer: 0 + m_Name: navpointM (21) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1518776455 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + 
m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1518776454} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -6.8899994, y: 1.5, z: 25.22} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 58 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1518776456 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1518776454} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1518776457 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1518776454} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1518776458 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1518776454} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &1548996791 stripped Transform: m_CorrespondingSourceObject: {fileID: 7673487251499544664, guid: c98043d57e0190845b790d7b92990bee, @@ -5857,6 +11049,85 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1557566888} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1562514093 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1562514094} + - component: {fileID: 1562514096} + - component: {fileID: 1562514095} + m_Layer: 0 + m_Name: navpointC (19) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1562514094 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1562514093} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 24.130003, y: 1.5, z: 54.98} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 21 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1562514095 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1562514093} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 
+ m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1562514096 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1562514093} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1590752703 GameObject: m_ObjectHideFlags: 0 @@ -5888,7 +11159,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 10} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 37 + m_RootOrder: 36 m_LocalEulerAnglesHint: {x: 0, y: -45, z: 0} --- !u!65 &1590752705 BoxCollider: @@ -5950,6 +11221,164 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1590752703} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1608580290 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1608580291} + - component: {fileID: 1608580293} + - component: {fileID: 1608580292} + m_Layer: 0 + m_Name: navpointC (6) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1608580291 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1608580290} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.21, y: 1.5, z: 34.79} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 8 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1608580292 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1608580290} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1608580293 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1608580290} + m_Mesh: {fileID: 10206, 
guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1609126276 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1609126277} + - component: {fileID: 1609126279} + - component: {fileID: 1609126278} + m_Layer: 0 + m_Name: navpointC (20) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1609126277 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1609126276} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.23, y: 1.5, z: 54.99} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 22 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1609126278 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1609126276} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1609126279 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1609126276} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1623447098 GameObject: m_ObjectHideFlags: 0 @@ -6043,6 +11472,335 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1623447098} m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1627189258 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1627189259} + - component: {fileID: 1627189261} + - component: {fileID: 1627189260} + m_Layer: 0 + m_Name: navpointC (9) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1627189259 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1627189258} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 19.08, y: 1.5, z: 34.79} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 11 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1627189260 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + 
m_GameObject: {fileID: 1627189258} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1627189261 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1627189258} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1635600739 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1635600740} + - component: {fileID: 1635600743} + - component: {fileID: 1635600742} + - component: {fileID: 1635600741} + m_Layer: 0 + m_Name: navpointM (47) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1635600740 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1635600739} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 12.630001, y: 1.5, z: 3.909997} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 90 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1635600741 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1635600739} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1635600742 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1635600739} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1635600743 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 
0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1635600739} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1657630948 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1657630949} + - component: {fileID: 1657630951} + - component: {fileID: 1657630950} + m_Layer: 0 + m_Name: navpointC (3) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1657630949 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1657630948} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 14.78, y: 1.5, z: 13.04} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 5 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1657630950 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1657630948} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1657630951 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1657630948} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1688740655 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1688740656} + - component: {fileID: 1688740658} + - component: {fileID: 1688740657} + m_Layer: 0 + m_Name: navpointC (33) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1688740656 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1688740655} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 11.33, y: 1.5, z: 22.77} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 53 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1688740657 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1688740655} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + 
m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1688740658 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1688740655} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1691119856 GameObject: m_ObjectHideFlags: 0 @@ -6322,6 +12080,177 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1702899413} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1719721961 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1719721962} + - component: {fileID: 1719721965} + - component: {fileID: 1719721964} + - component: {fileID: 1719721963} + m_Layer: 0 + m_Name: navpointM (15) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1719721962 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1719721961} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 52.95, y: 1.5, z: 44.06} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 48 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1719721963 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1719721961} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1719721964 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1719721961} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + 
m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1719721965 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1719721961} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1722017907 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1722017908} + - component: {fileID: 1722017910} + - component: {fileID: 1722017909} + m_Layer: 0 + m_Name: navpointC (22) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1722017908 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1722017907} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 28.670004, y: 1.5, z: 56.98} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 24 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1722017909 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1722017907} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1722017910 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1722017907} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1732971588 GameObject: m_ObjectHideFlags: 0 @@ -6415,6 +12344,256 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1732971588} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1742013155 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1742013156} + - component: {fileID: 1742013159} + - component: {fileID: 1742013158} + - component: {fileID: 1742013157} + m_Layer: 0 + m_Name: navpointM (33) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1742013156 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742013155} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 1.2, y: 1.5, z: 38.27} + m_LocalScale: {x: 1.4367, y: 
1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 74 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1742013157 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742013155} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1742013158 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742013155} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1742013159 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1742013155} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1744542620 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1744542621} + - component: {fileID: 1744542623} + - component: {fileID: 1744542622} + m_Layer: 0 + m_Name: navpointC (15) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1744542621 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1744542620} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 31.18, y: 1.5, z: 34.779} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 17 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1744542622 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1744542620} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + 
m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1744542623 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1744542620} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1747469715 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1747469716} + - component: {fileID: 1747469718} + - component: {fileID: 1747469717} + m_Layer: 0 + m_Name: navpointC (18) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1747469716 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1747469715} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 26.11, y: 1.5, z: 54.98} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 20 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1747469717 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1747469715} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1747469718 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1747469715} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1748842407 GameObject: m_ObjectHideFlags: 0 @@ -6452,6 +12631,256 @@ Transform: m_Father: {fileID: 2059424040} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1752479209 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1752479210} + - component: {fileID: 1752479213} + - component: {fileID: 1752479212} + - component: {fileID: 1752479211} + m_Layer: 0 + m_Name: navpointM (1) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1752479210 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1752479209} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} 
+ m_LocalPosition: {x: 33.18, y: 1.5, z: 45.75} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 32 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1752479211 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1752479209} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1752479212 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1752479209} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1752479213 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1752479209} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1777274280 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1777274281} + - component: {fileID: 1777274283} + - component: {fileID: 1777274282} + m_Layer: 0 + m_Name: navpointC (17) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1777274281 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1777274280} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 33.9, y: 1.5, z: 36.59} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 19 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1777274282 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1777274280} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + 
m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1777274283 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1777274280} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1780254485 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1780254486} + - component: {fileID: 1780254488} + - component: {fileID: 1780254487} + m_Layer: 0 + m_Name: navpointC (4) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1780254486 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1780254485} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 14.78, y: 1.5, z: 6.72} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 6 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1780254487 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1780254485} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1780254488 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1780254485} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1804984327 GameObject: m_ObjectHideFlags: 0 @@ -6638,6 +13067,164 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1809836366} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1811425349 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1811425350} + - component: {fileID: 1811425352} + - component: {fileID: 1811425351} + m_Layer: 0 + m_Name: navpointC (36) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1811425350 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: 
{fileID: 0} + m_GameObject: {fileID: 1811425349} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -3.71, y: 1.5, z: 42.84} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 66 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1811425351 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1811425349} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1811425352 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1811425349} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1861663018 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1861663019} + - component: {fileID: 1861663021} + - component: {fileID: 1861663020} + m_Layer: 0 + m_Name: navpointC (24) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1861663019 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1861663018} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 31.07, y: 1.5, z: 54.989} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 26 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1861663020 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1861663018} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1861663021 
+MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1861663018} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1866177184 GameObject: m_ObjectHideFlags: 0 @@ -6691,7 +13278,7 @@ MeshRenderer: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1866177184} - m_Enabled: 0 + m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 m_DynamicOccludee: 1 @@ -6731,7 +13318,7 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1866177184} m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1 &1873890277 +--- !u!1 &1880711831 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -6739,51 +13326,50 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 1873890278} - - component: {fileID: 1873890281} - - component: {fileID: 1873890280} - - component: {fileID: 1873890279} + - component: {fileID: 1880711832} + - component: {fileID: 1880711835} + - component: {fileID: 1880711834} + - component: {fileID: 1880711833} m_Layer: 0 - m_Name: Cube (28) - m_TagString: Untagged + m_Name: navpointM (42) + m_TagString: Point m_Icon: {fileID: 0} - m_NavMeshLayer: 1 - m_StaticEditorFlags: 40 + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &1873890278 +--- !u!4 &1880711832 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1873890277} + m_GameObject: {fileID: 1880711831} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -90, y: 1.5, z: 44.93} - m_LocalScale: {x: 0.3, y: 3, z: 10} + m_LocalPosition: {x: 40.62, y: 1.5, z: 4.23} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} m_Children: [] - m_Father: {fileID: 2059424040} - m_RootOrder: 30 + m_Father: {fileID: 1269785725} + m_RootOrder: 85 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!65 &1873890279 -BoxCollider: +--- !u!114 &1880711833 +MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1873890277} - m_Material: {fileID: 0} - m_IsTrigger: 0 + m_GameObject: {fileID: 1880711831} m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!23 &1873890280 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1880711834 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1873890277} + m_GameObject: {fileID: 1880711831} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -6795,7 +13381,7 @@ MeshRenderer: m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -6816,14 +13402,93 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!33 &1873890281 +--- !u!33 &1880711835 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 1873890277} - m_Mesh: {fileID: 10202, guid: 
0000000000000000e000000000000000, type: 0} + m_GameObject: {fileID: 1880711831} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1895971581 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1895971582} + - component: {fileID: 1895971584} + - component: {fileID: 1895971583} + m_Layer: 0 + m_Name: navpointC (38) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1895971582 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1895971581} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -3.71, y: 1.5, z: 48.93} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 68 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1895971583 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1895971581} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1895971584 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1895971581} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1902232095 GameObject: m_ObjectHideFlags: 0 @@ -7072,7 +13737,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 10} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 36 + m_RootOrder: 35 m_LocalEulerAnglesHint: {x: 0, y: 45, z: 0} --- !u!65 &1913878395 BoxCollider: @@ -7134,6 +13799,190 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1913878393} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1939525609 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1939525610} + - component: {fileID: 1939525613} + - component: {fileID: 1939525612} + - component: {fileID: 1939525611} + m_Layer: 0 + m_Name: navpointM (35) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1939525610 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1939525609} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + 
m_LocalPosition: {x: 66.97, y: 1.5, z: 8.84} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 78 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1939525611 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1939525609} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1939525612 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1939525609} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1939525613 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1939525609} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1958065631 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1958065632} + - component: {fileID: 1958065635} + - component: {fileID: 1958065634} + - component: {fileID: 1958065633} + m_Layer: 0 + m_Name: navpointM (10) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1958065632 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1958065631} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 50.88, y: 1.5, z: 36} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 41 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1958065633 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1958065631} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1958065634 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1958065631} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + 
m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1958065635 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1958065631} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1959130206 GameObject: m_ObjectHideFlags: 0 @@ -7165,7 +14014,7 @@ Transform: m_LocalScale: {x: 0.3, y: 3, z: 10} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 41 + m_RootOrder: 40 m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} --- !u!65 &1959130208 BoxCollider: @@ -7320,6 +14169,190 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1960832333} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1989253878 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1989253879} + - component: {fileID: 1989253882} + - component: {fileID: 1989253881} + - component: {fileID: 1989253880} + m_Layer: 0 + m_Name: navpointM (27) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1989253879 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1989253878} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -17.47, y: 1.5, z: 13.49} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 64 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1989253880 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1989253878} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1989253881 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1989253878} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + 
m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1989253882 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1989253878} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1999232104 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 1999232105} + - component: {fileID: 1999232108} + - component: {fileID: 1999232107} + - component: {fileID: 1999232106} + m_Layer: 0 + m_Name: navpointM (49) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1999232105 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1999232104} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 21.010002, y: 1.5, z: 0.46999836} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 92 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &1999232106 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1999232104} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &1999232107 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1999232104} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &1999232108 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1999232104} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &2003256938 GameObject: m_ObjectHideFlags: 0 @@ -7413,7 +14446,7 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2003256938} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1 &2015542236 +--- !u!1 &2021461316 GameObject: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -7421,51 +14454,50 @@ GameObject: m_PrefabAsset: {fileID: 0} serializedVersion: 6 m_Component: - - component: {fileID: 2015542237} - - component: {fileID: 2015542240} - - component: 
{fileID: 2015542239} - - component: {fileID: 2015542238} + - component: {fileID: 2021461317} + - component: {fileID: 2021461320} + - component: {fileID: 2021461319} + - component: {fileID: 2021461318} m_Layer: 0 - m_Name: Cube - m_TagString: Untagged + m_Name: navpointM (4) + m_TagString: Point m_Icon: {fileID: 0} - m_NavMeshLayer: 1 - m_StaticEditorFlags: 40 + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &2015542237 +--- !u!4 &2021461317 Transform: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2015542236} + m_GameObject: {fileID: 2021461316} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 1.5, z: 0} - m_LocalScale: {x: 0.3, y: 3, z: 10} + m_LocalPosition: {x: 21.27, y: 1.5, z: 45.75} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} m_Children: [] - m_Father: {fileID: 366909976} - m_RootOrder: 0 + m_Father: {fileID: 1269785725} + m_RootOrder: 35 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!65 &2015542238 -BoxCollider: +--- !u!114 &2021461318 +MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2015542236} - m_Material: {fileID: 0} - m_IsTrigger: 0 + m_GameObject: {fileID: 2021461316} m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!23 &2015542239 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2021461319 MeshRenderer: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2015542236} + m_GameObject: {fileID: 2021461316} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -7477,7 +14509,7 @@ MeshRenderer: m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -7498,14 +14530,106 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!33 &2015542240 +--- !u!33 &2021461320 MeshFilter: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 2015542236} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} + m_GameObject: {fileID: 2021461316} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &2024428590 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2024428591} + - component: {fileID: 2024428594} + - component: {fileID: 2024428593} + - component: {fileID: 2024428592} + m_Layer: 0 + m_Name: navpointM (17) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2024428591 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2024428590} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 48.050007, y: 1.5, z: 54.19} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 
1269785725} + m_RootOrder: 50 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &2024428592 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2024428590} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2024428593 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2024428590} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2024428594 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2024428590} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &2028444957 GameObject: m_ObjectHideFlags: 0 @@ -7598,6 +14722,99 @@ Transform: m_Father: {fileID: 0} m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0} +--- !u!1 &2044688792 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2044688793} + - component: {fileID: 2044688796} + - component: {fileID: 2044688795} + - component: {fileID: 2044688794} + m_Layer: 0 + m_Name: Cube (6) + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 8 + m_IsActive: 1 +--- !u!4 &2044688793 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2044688792} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 50.6, y: -0.5, z: 0} + m_LocalScale: {x: 100, y: 1, z: 100} + m_Children: [] + m_Father: {fileID: 947663786} + m_RootOrder: 5 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!65 &2044688794 +BoxCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2044688792} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!23 &2044688795 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2044688792} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + 
m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2044688796 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2044688792} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &2049548349 GameObject: m_ObjectHideFlags: 0 @@ -7625,7 +14842,7 @@ Transform: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2049548349} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 30.59, y: 0, z: -1.58} + m_LocalPosition: {x: 30.59, y: 1.5, z: -1.58} m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} m_Children: [] m_Father: {fileID: 1269785725} @@ -7651,7 +14868,7 @@ MeshRenderer: m_PrefabInstance: {fileID: 0} m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2049548349} - m_Enabled: 0 + m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 m_DynamicOccludee: 1 @@ -7841,7 +15058,6 @@ Transform: - {fileID: 596558266} - {fileID: 1282427543} - {fileID: 746474114} - - {fileID: 1873890278} - {fileID: 103297873} - {fileID: 1449846515} - {fileID: 1356175823} @@ -7855,9 +15071,102 @@ Transform: - {fileID: 1959130207} - {fileID: 902084817} - {fileID: 2138138039} + - {fileID: 2108877460} m_Father: {fileID: 760001244} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &2061069355 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2061069356} + - component: {fileID: 2061069359} + - component: {fileID: 2061069358} + - component: {fileID: 2061069357} + m_Layer: 0 + m_Name: navpointM (16) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2061069356 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2061069355} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 50.88, y: 1.5, z: 44.06} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 49 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &2061069357 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2061069355} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2061069358 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2061069355} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + 
m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2061069359 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2061069355} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &2062075265 GameObject: m_ObjectHideFlags: 0 @@ -7951,6 +15260,177 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2062075265} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &2081371644 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2081371645} + - component: {fileID: 2081371648} + - component: {fileID: 2081371647} + - component: {fileID: 2081371646} + m_Layer: 0 + m_Name: navpointM (37) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2081371645 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2081371644} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 44.12, y: 1.5, z: 0.74} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 80 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &2081371646 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2081371644} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2081371647 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2081371644} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2081371648 +MeshFilter: + 
m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2081371644} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &2085673839 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2085673840} + - component: {fileID: 2085673842} + - component: {fileID: 2085673841} + m_Layer: 0 + m_Name: navpointC (16) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2085673840 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2085673839} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 31.9, y: 1.5, z: 36.59} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 18 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &2085673841 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2085673839} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2085673842 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2085673839} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &2092116840 GameObject: m_ObjectHideFlags: 0 @@ -8044,6 +15524,283 @@ MeshFilter: m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 2092116840} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &2093316295 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2093316296} + - component: {fileID: 2093316299} + - component: {fileID: 2093316298} + - component: {fileID: 2093316297} + m_Layer: 0 + m_Name: navpointM (32) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2093316296 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2093316295} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -7.18, y: 1.5, z: 38.18} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + 
m_RootOrder: 73 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &2093316297 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2093316295} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2093316298 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2093316295} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2093316299 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2093316295} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &2097277515 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2097277516} + - component: {fileID: 2097277519} + - component: {fileID: 2097277518} + - component: {fileID: 2097277517} + m_Layer: 0 + m_Name: navpointM (5) + m_TagString: Point + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &2097277516 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2097277515} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 36.03, y: 1.5, z: 45.75} + m_LocalScale: {x: 1.4367, y: 1.4367, z: 1.4367} + m_Children: [] + m_Father: {fileID: 1269785725} + m_RootOrder: 36 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &2097277517 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2097277515} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 6a2d29bfc31a8cf4e831e3bb80720414, type: 3} + m_Name: + m_EditorClassIdentifier: +--- !u!23 &2097277518 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2097277515} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 2100000, guid: 536ae120520e6f245bad32b94a03f6a2, type: 2} + m_StaticBatchInfo: + 
firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2097277519 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2097277515} + m_Mesh: {fileID: 10206, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &2108877459 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 2108877460} + - component: {fileID: 2108877463} + - component: {fileID: 2108877462} + - component: {fileID: 2108877461} + m_Layer: 0 + m_Name: Cube (42) + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 1 + m_StaticEditorFlags: 40 + m_IsActive: 1 +--- !u!4 &2108877460 +Transform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2108877459} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 13.91, y: 1.5, z: 10.51} + m_LocalScale: {x: 0.3, y: 3, z: 3.06} + m_Children: [] + m_Father: {fileID: 2059424040} + m_RootOrder: 43 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!65 &2108877461 +BoxCollider: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2108877459} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!23 &2108877462 +MeshRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2108877459} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 + m_RenderingLayerMask: 1 + m_RendererPriority: 0 + m_Materials: + - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_ReceiveGI: 1 + m_PreserveUVs: 0 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 1 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!33 &2108877463 +MeshFilter: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 2108877459} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &2124998309 GameObject: m_ObjectHideFlags: 0 @@ -8261,7 +16018,7 @@ Transform: m_LocalScale: {x: 5, y: 3, z: 22.74} m_Children: [] m_Father: {fileID: 2059424040} - m_RootOrder: 43 + m_RootOrder: 42 
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!65 &2138138040 BoxCollider: diff --git a/Assets/Scenes/tyt player.meta b/Assets/Scenes/tyt player.meta old mode 100755 new mode 100644 diff --git a/Assets/Scenes/tyt player/Player.prefab b/Assets/Scenes/tyt player/Player.prefab deleted file mode 100755 index 280ef7e..0000000 --- a/Assets/Scenes/tyt player/Player.prefab +++ /dev/null @@ -1,572 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!1 &4839747462788443060 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8723084081118426009} - - component: {fileID: 5958306789513936477} - m_Layer: 0 - m_Name: WeaponAnimations - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8723084081118426009 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4839747462788443060} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 2009060004548700032} - m_Father: {fileID: 8510909888689775087} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!95 &5958306789513936477 -Animator: - serializedVersion: 3 - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 4839747462788443060} - m_Enabled: 1 - m_Avatar: {fileID: 0} - m_Controller: {fileID: 9100000, guid: 1ae0500836ae5b040ab0c41e276bf073, type: 2} - m_CullingMode: 0 - m_UpdateMode: 0 - m_ApplyRootMotion: 0 - m_LinearVelocityBlending: 0 - m_WarningMessage: - m_HasTransformHierarchy: 1 - m_AllowConstantClipSamplingOptimization: 1 - m_KeepAnimatorControllerStateOnDisable: 0 ---- !u!1 &5245491127989480125 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491127989480103} - - component: {fileID: 5245491127989480102} - - component: {fileID: 5583297852527723678} - - component: {fileID: 5245491127989480120} - - component: {fileID: 5245491127989480100} - - component: {fileID: 5245491127989480121} - - component: {fileID: 5245491127989480123} - - component: {fileID: 5245491127989480122} - - component: {fileID: 8693388664594773644} - m_Layer: 8 - m_Name: Player - m_TagString: Defender - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491127989480103 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_LocalRotation: {x: 0, y: 0.7071068, z: 0, w: 0.7071068} - m_LocalPosition: {x: 28.21, y: 10.9, z: 46.67} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 5245491129196666053} - - {fileID: 5245491129603592455} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} ---- !u!33 &5245491127989480102 -MeshFilter: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!23 
&5583297852527723678 -MeshRenderer: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_RayTracingMode: 2 - m_RenderingLayerMask: 1 - m_RendererPriority: 0 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_ReceiveGI: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 1 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!136 &5245491127989480120 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 2 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &5245491127989480100 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.3 - m_Height: 1.3 - m_Direction: 1 - m_Center: {x: 0, y: -0.35, z: 0} ---- !u!136 &5245491127989480121 -CapsuleCollider: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.3 - m_Height: 0.8 - m_Direction: 1 - m_Center: {x: 0, y: -0.6, z: 0} ---- !u!143 &5245491127989480123 -CharacterController: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Height: 2 - m_Radius: 0.5 - m_SlopeLimit: 45 - m_StepOffset: 0.3 - m_SkinWidth: 0.08 - m_MinMoveDistance: 0.001 - m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &5245491127989480122 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 9826297ef4d853741b2af768441ec7f7, type: 3} - m_Name: - m_EditorClassIdentifier: - input_Movement: {x: 0, y: 0} - input_View: {x: 0, y: 0} - cameraHolder: {fileID: 5245491129196666053} - feetTransform: {fileID: 5245491129603592455} - playerSettings: - ViewXSensetivity: 20 - ViewYSensetivity: 20 - ViewXInverted: 0 - ViewYInverted: 0 - SprintingHold: 0 - MovementSmoothing: 0 - RunningForwardSpeed: 10 - RunningStrafeSpeed: 6 - WalkingForwardSpeed: 4 - WalkingBackwardSpeed: 2 - WalkingStrafeSpeed: 3 - JumpingHeight: 6 - JumpingFalloff: 1 - FallingSmoothing: 0 - SpeedEffector: 1 - CrouchSpeedEffector: 0 - ProneSpeedEffector: 0 - FallingSpeedEffector: 0 - ViewClampYMin: -70 - 
ViewClampYMax: 80 - playerMask: - serializedVersion: 2 - m_Bits: 55 - gravityAmount: 0.05 - gravityMin: -3 - jumpingForce: {x: 0, y: 0, z: 0} - playerStance: 0 - playerStanceSmoothing: 0.2 - playerStandStance: - CameraHeight: 0.7 - StanceCollider: {fileID: 5245491127989480120} - playerCrouchStance: - CameraHeight: 0.3 - StanceCollider: {fileID: 5245491127989480100} - playerProneStance: - CameraHeight: -0.58 - StanceCollider: {fileID: 5245491127989480121} - isSprinting: 0 - currentWeapon: {fileID: 8510909888689775086} - weaponAnimationSpeed: 0 - damage: 10 - range: 100 - fpsCam: {fileID: 0} - muzzleFlash: {fileID: 0} - impactEffect: {fileID: 0} ---- !u!114 &8693388664594773644 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491127989480125} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a6f2a081cfc8c4b4bb6864331109d147, type: 3} - m_Name: - m_EditorClassIdentifier: - agentParameters: - maxStep: 0 - hasUpgradedFromAgentParameters: 1 - MaxStep: 0 - moveController: {fileID: 0} ---- !u!1 &5245491128202443531 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491128202443574} - - component: {fileID: 5245491128202443529} - - component: {fileID: 5245491128202443528} - m_Layer: 8 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491128202443574 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491128202443531} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.388, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 5245491129196666053} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &5245491128202443529 -Camera: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491128202443531} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_projectionMatrixMode: 1 - m_GateFitMode: 2 - m_FOVAxisMode: 0 - m_SensorSize: {x: 36, y: 24} - m_LensShift: {x: 0, y: 0} - m_FocalLength: 50 - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.1 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!81 &5245491128202443528 -AudioListener: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491128202443531} - m_Enabled: 1 ---- !u!1 &5245491129196666052 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 
5245491129196666053} - m_Layer: 8 - m_Name: CameraHolder - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491129196666053 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491129196666052} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.7, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 5245491128202443574} - - {fileID: 8510909888689775087} - m_Father: {fileID: 5245491127989480103} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &5245491129603592454 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 5245491129603592455} - m_Layer: 8 - m_Name: FeetTransform - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &5245491129603592455 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 5245491129603592454} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -1, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 5245491127989480103} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &8510909888689775085 -GameObject: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - serializedVersion: 6 - m_Component: - - component: {fileID: 8510909888689775087} - - component: {fileID: 8510909888689775086} - m_Layer: 8 - m_Name: Weapon - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &8510909888689775087 -Transform: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888689775085} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.332, y: 0.038, z: 0.394} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 8723084081118426009} - m_Father: {fileID: 5245491129196666053} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &8510909888689775086 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8510909888689775085} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 088bf904d7c90a44dbb35c1d47c2692e, type: 3} - m_Name: - m_EditorClassIdentifier: - settings: - SwayAmount: 4 - SwayYInverted: 0 - SwayXInverted: 0 - SwaySmoothing: 0.1 - SwayResetSmoothing: 0.1 - SwayClampX: 8 - SwayClampY: 8 - MovementSwayX: 0 - MovementSwayY: 0 - MovementSwayYInverted: 0 - MovementSwayXInverted: 0 - MovementSwaySmoothing: 0 - weaponAnimator: {fileID: 5958306789513936477} ---- !u!1001 &6817088243126258842 -PrefabInstance: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 8723084081118426009} - m_Modifications: - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_RootOrder - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, 
guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalScale.x - value: 0.1 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalScale.y - value: 0.1 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalScale.z - value: 0.1 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalPosition.x - value: 0.145 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalPosition.y - value: -0.76 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalRotation.w - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalRotation.y - value: 1 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalEulerAnglesHint.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalEulerAnglesHint.y - value: 180 - objectReference: {fileID: 0} - - target: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_LocalEulerAnglesHint.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: -927199367670048503, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - propertyPath: m_Name - value: AKM - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_SourcePrefab: {fileID: 100100000, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, type: 3} ---- !u!4 &2009060004548700032 stripped -Transform: - m_CorrespondingSourceObject: {fileID: -4216859302048453862, guid: 99fcce1f10b2ebd4a8cd31a345f6bed8, - type: 3} - m_PrefabInstance: {fileID: 6817088243126258842} - m_PrefabAsset: {fileID: 0} diff --git a/Assets/Scenes/tyt player/Player.prefab.meta b/Assets/Scenes/tyt player/Player.prefab.meta old mode 100755 new mode 100644 diff --git a/Assets/Scripts/Bots/CharacterFactory.cs b/Assets/Scripts/Bots/CharacterFactory.cs new file mode 100644 index 0000000..ba1f681 --- /dev/null +++ b/Assets/Scripts/Bots/CharacterFactory.cs @@ -0,0 +1,118 @@ +using System.Collections.Generic; +using Unity.Barracuda; +using Unity.MLAgents.Policies; +using UnityEngine; + +public class CharacterFactory : MonoBehaviour +{ + private static CharacterFactory instance; + public static CharacterFactory Instance => instance; + + [SerializeField] private List spawnPointsForDefendersTeam; + [SerializeField] private List spawnPointsForAttackersTeam; + [SerializeField] private GameObject AIPrefab; + [SerializeField] private GameObject PlayerPrefab; + + 
private List bots = new List(); + public GameObject player { get; private set; } + + private void Awake() + { + if (instance == null) + instance = this; + else + { + Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } + } + + private void Start() + { + var attcNum = SettingsReader.Instance.GetSettings.NumOfAttackers; + var defNum = SettingsReader.Instance.GetSettings.NumOfDefenders; + var humanDef = SettingsReader.Instance.GetSettings.HasHumanDefender == true ? 1 : 0; + var humanAtc = SettingsReader.Instance.GetSettings.HasHumanAttacker == true ? 1 : 0; + + if (humanAtc == 1 && humanDef == 1) + throw new System.ArgumentException("Can be only one human player"); + + for (int i = 0; i < attcNum - humanAtc; i++) + InstanciateEntity(Team.Attackers, TypeAI.D0DiskAI, + spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)]); + for (int i = 0; i < defNum - humanDef; i++) + InstanciateEntity(Team.Defenders, TypeAI.D0DiskAI, + spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]); + if (humanAtc == 1) + InstanciateEntity(Team.Attackers, TypeAI.HumanAI, + spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)]); + if (humanDef == 1) + InstanciateEntity(Team.Defenders, TypeAI.HumanAI, + spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]); + + GameManager.OnResetScene += ResetCharacters; + } + + private void InstanciateEntity(Team team, TypeAI typeAi, NavPoint spawnPoint) + { + var gameobject = GameObject.Instantiate( + typeAi == TypeAI.HumanAI ? PlayerPrefab : AIPrefab, + spawnPoint.Position, + Quaternion.identity); + gameobject.SetActive(true); + if (team == Team.Attackers) + gameObject.tag = "Attacker"; + else + gameObject.tag = "Defender"; + + if (typeAi == TypeAI.HumanAI) + { + gameobject.GetComponent().GetCharacter.Team = team; + player = gameobject; + } + else + { + var npc = gameobject.GetComponent(); + npc.GetCharacter.Team = team; + npc.SetModel(team.ToString(), ScriptableObject.CreateInstance(), InferenceDevice.Default ); + gameobject.GetComponent().PointStartID = spawnPoint.PointId; + bots.Add(gameobject); + } + } + + public void ReSpawn(ICharacter character, ref Vector3 pos, ref int startPointId) + { + character.ResetCharacter(); + var team = character.GetCharacter.Team; + NavPoint navPoint; + if (team == Team.Attackers) + navPoint = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)]; + else + navPoint = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)]; + + pos = navPoint.Position; + startPointId = navPoint.PointId; + } + + private void ResetCharacters() + { + foreach (var bot in bots) + { + var npc = bot.GetComponent(); + npc.ResetCharacter(); + if (npc.GetCharacter.Team == Team.Attackers) + bot.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].Position; + else + bot.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].Position; + } + Player player; + if (TryGetComponent(out player)) + { + player.ResetCharacter(); + if (player.GetCharacter.Team == Team.Attackers) + this.player.transform.position = spawnPointsForAttackersTeam[Random.Range(0, spawnPointsForAttackersTeam.Count)].Position; + else + this.player.transform.position = spawnPointsForDefendersTeam[Random.Range(0, spawnPointsForDefendersTeam.Count)].Position; + } + } +} \ No newline at end of file diff --git a/Assets/Scripts/Bots/CharacterPooler.cs.meta 
b/Assets/Scripts/Bots/CharacterFactory.cs.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scripts/Bots/CharacterPooler.cs.meta rename to Assets/Scripts/Bots/CharacterFactory.cs.meta diff --git a/Assets/Scripts/Bots/CharacterPooler.cs b/Assets/Scripts/Bots/CharacterPooler.cs deleted file mode 100755 index 1dc495a..0000000 --- a/Assets/Scripts/Bots/CharacterPooler.cs +++ /dev/null @@ -1,4 +0,0 @@ -public class CharacterPooler -{ - -} \ No newline at end of file diff --git a/Assets/Scripts/Bots/TeamEnum.cs b/Assets/Scripts/Bots/TeamEnum.cs index 68279f5..a2c8a95 100755 --- a/Assets/Scripts/Bots/TeamEnum.cs +++ b/Assets/Scripts/Bots/TeamEnum.cs @@ -2,4 +2,15 @@ { Defenders, Attackers, +} + +public static class TeamExtension +{ + public static Team GetOppositeTeam(this Team team) + { + if (team == Team.Attackers) + return Team.Defenders; + else + return Team.Attackers; + } } \ No newline at end of file diff --git a/Assets/Scripts/Character/Character.cs b/Assets/Scripts/Character/Character.cs new file mode 100644 index 0000000..3713998 --- /dev/null +++ b/Assets/Scripts/Character/Character.cs @@ -0,0 +1,12 @@ +using UnityEngine; +public class Character +{ + public Team Team { get; set; } + public float LastTimeHit = 0; + public CharacterCondition Condition; + + public Character() + { + Condition = new CharacterCondition(); + } +} \ No newline at end of file diff --git a/Assets/Scripts/Sensors/SensorType.cs.meta b/Assets/Scripts/Character/Character.cs.meta old mode 100755 new mode 100644 similarity index 83% rename from Assets/Scripts/Sensors/SensorType.cs.meta rename to Assets/Scripts/Character/Character.cs.meta index 26f4b58..aec9e7e --- a/Assets/Scripts/Sensors/SensorType.cs.meta +++ b/Assets/Scripts/Character/Character.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 8f76201fe6436164789d10350a0fd6e2 +guid: 44d6a17ad31b31241928e1a17e9aba37 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/Assets/Scripts/Character/CharacterCondition.cs b/Assets/Scripts/Character/CharacterCondition.cs index 3844814..2124ddb 100755 --- a/Assets/Scripts/Character/CharacterCondition.cs +++ b/Assets/Scripts/Character/CharacterCondition.cs @@ -1,12 +1,6 @@ using System; using UnityEngine; -public enum NPCState -{ - InCover, - InBlancPoint, - InRunning, -} public class CharacterCondition { @@ -25,7 +19,18 @@ public class CharacterCondition { health = value; OnChangeHealthEvent?.Invoke(value); - } + } + } + + public int GetHealthPointsInQuantile() + { + if (health < 25) + return 0; + else if (health < 50) + return 1; + else if (health < 75) + return 2; + else return 3; } private int armour; public int ArmourPoints @@ -40,6 +45,17 @@ public class CharacterCondition OnChangeArmourEvent?.Invoke(value); } } + public int GetArmourPointsInQuantile() + { + if (armour < 25) + return 0; + else if (armour < 50) + return 1; + else if (armour < 75) + return 2; + else return 3; + } + private int ammo; public int Ammunition { @@ -54,15 +70,17 @@ public class CharacterCondition } } - [HideInInspector] - public NPCState npcState { get; private set; } - public CharacterCondition() + { + this.Reset(); + } + + public void Reset() { var settings = SettingsReader.Instance.GetSettings; - ammo = settings.maxAmmo; - health = settings.maxHealth; - armour = settings.maxArmour; + ammo = settings.MaxAmmo; + health = settings.MaxHealth; + armour = settings.MaxArmour; } public void GiveHealth(int health) => HealthPoints = Mathf.Clamp(health + HealthPoints, 0, 100); diff --git 
a/Assets/Scripts/Sensors.meta b/Assets/Scripts/Character/Interfaces.meta old mode 100755 new mode 100644 similarity index 77% rename from Assets/Scripts/Sensors.meta rename to Assets/Scripts/Character/Interfaces.meta index a808c2b..cb4a3aa --- a/Assets/Scripts/Sensors.meta +++ b/Assets/Scripts/Character/Interfaces.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 5e73ba257bc6b684c86edf9ecfd475ef +guid: f23b6db3be1e4cd469fd18dfe3e39764 folderAsset: yes DefaultImporter: externalObjects: {} diff --git a/Assets/Scripts/Character/Interfaces/ICharacter.cs b/Assets/Scripts/Character/Interfaces/ICharacter.cs new file mode 100644 index 0000000..aef14a7 --- /dev/null +++ b/Assets/Scripts/Character/Interfaces/ICharacter.cs @@ -0,0 +1,5 @@ +public interface ICharacter +{ + Character GetCharacter { get; } + void ResetCharacter(); +} \ No newline at end of file diff --git a/Assets/Scripts/Sensors/Sensors.cs.meta b/Assets/Scripts/Character/Interfaces/ICharacter.cs.meta similarity index 83% rename from Assets/Scripts/Sensors/Sensors.cs.meta rename to Assets/Scripts/Character/Interfaces/ICharacter.cs.meta index 1109bef..b53c34e 100644 --- a/Assets/Scripts/Sensors/Sensors.cs.meta +++ b/Assets/Scripts/Character/Interfaces/ICharacter.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 4599c57bc5b1c3945847dead0f9f0ba4 +guid: b6dfb78244ae35c4db1326d5f5b73375 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/Assets/Scripts/Character/Interfaces/INpcBaseState.cs b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs new file mode 100644 index 0000000..bf17ad2 --- /dev/null +++ b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs @@ -0,0 +1,17 @@ +using UnityEngine; + +public interface INpcBaseState +{ + NpcEnumState State { get; } + bool InCover { get; } + bool IsRunning { get; } + bool InDirectPoint { get; } + float HitChance { get; } + float DoDamageChance { get; } +} + +public interface INpcBaseBodyState +{ + NpcBodyState State { get; } + Vector3 GetPointToHit(GameObject go); +} \ No newline at end of file diff --git a/Assets/Scripts/Character/Interfaces/INpcBaseState.cs.meta b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs.meta new file mode 100644 index 0000000..f0585e4 --- /dev/null +++ b/Assets/Scripts/Character/Interfaces/INpcBaseState.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 58b7e1962495ada4c8e6ee6219c99a20 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Scripts/Character/MovementController.cs b/Assets/Scripts/Character/MovementController.cs old mode 100755 new mode 100644 index e45bf60..98df2aa --- a/Assets/Scripts/Character/MovementController.cs +++ b/Assets/Scripts/Character/MovementController.cs @@ -1,40 +1,83 @@ -using System.Linq; -using System.Collections.Generic; +using System.Collections.Generic; +using System.Linq; using UnityEngine; using UnityEngine.AI; [RequireComponent(typeof(NavMeshAgent))] public class MovementController : MonoBehaviour { - public NavPoint currentPosition { get; private set; } + public int PointStartID { get; set; } + public int PointEndID { get; private set; } + public float FlagDistance { get; private set; } + private const float UpdateFlagPositionDelay = 5; + private const float UpdateReachedDestinationDelay = 5; + [SerializeField] private NavMeshAgent navMeshAgent; + [SerializeField] private GameObject flag; + public float DistanceToGo { get; private set; } + public float 
RemainingDistance => navMeshAgent.remainingDistance; + private Dictionary<int, NavPoint> _idNavPointDict; - private void Start() + + private void Awake() { - navMeshAgent.speed = SettingsReader.Instance.GetSettings.movementSpeed; + navMeshAgent.speed = SettingsReader.Instance.GetSettings.MovementSpeed; + _idNavPointDict = MapManager.Instance.IDToNavPoint; + InvokeRepeating(nameof(UpdateFlagPosition), 0, UpdateFlagPositionDelay); + InvokeRepeating(nameof(ReachedDestination), 0, UpdateReachedDestinationDelay); + } + + private void OnDestroy() + { + CancelInvoke(nameof(UpdateFlagPosition)); + CancelInvoke(nameof(ReachedDestination)); } - public void Move() + private void UpdateFlagPosition() { - var pointCandidate = getPointCandidate(); - goToNextNavPoint(pointCandidate); + FlagDistance = (flag.transform.position - gameObject.transform.position).magnitude; } - + public void MoveToRandomPoint() - { - Debug.Log(MapManager.navPoints == null); - goToNextNavPoint(MapManager.navPoints[Random.Range(0, MapManager.navPoints.Count)]); - } - - private NavPoint getPointCandidate() { - var NavPointsPositions = MapManager.navPoints - .Select(point => point.transform.position) - .Where(point => (currentPosition.transform.position - point).magnitude <= SettingsReader.Instance.GetSettings.movementSpeed) - .ToList(); - return null; + GoToNextNavPoint(MapManager.Instance.NavPoints[Random.Range(0, MapManager.Instance.NavPoints.Count)]); } - public void goToNextNavPoint(NavPoint destination) => - navMeshAgent.SetDestination(destination.transform.position); + public List<NavPoint> GetPointsCandidate() + { + return MapManager.Instance.NavPoints + .Where(point => + (_idNavPointDict[PointStartID].Position - point.Position).magnitude < SettingsReader.Instance.GetSettings.MovementDistance) + .ToList(); + } + + public void GoToNextNavPoint(NavPoint destination) + { + if (navMeshAgent.isStopped == true) navMeshAgent.isStopped = false; + PointStartID = PointEndID; + PointEndID = destination.PointId; + navMeshAgent.SetDestination(destination.Position); + DistanceToGo = navMeshAgent.remainingDistance; + } + + public void ReturnToStartPoint() + { + if (navMeshAgent.isStopped == true) navMeshAgent.isStopped = false; + navMeshAgent.SetDestination(_idNavPointDict[PointStartID].Position); + PointEndID = PointStartID; + PointStartID = -1; + } + + public void StopOnPath() + { + navMeshAgent.isStopped = true; + PointStartID = -1; + PointEndID = -1; + } + + public void ReachedDestination() + { + if ((navMeshAgent.isStopped == false) && (navMeshAgent.velocity.magnitude < 0.1)) + PointStartID = PointEndID; + } } diff --git a/Assets/Scripts/Character/NPC.cs b/Assets/Scripts/Character/NPC.cs old mode 100755 new mode 100644 index 776c42f..7af2ae9 --- a/Assets/Scripts/Character/NPC.cs +++ b/Assets/Scripts/Character/NPC.cs @@ -1,61 +1,212 @@ using System; -using UnityEngine; +using System.Collections.Generic; using Unity.MLAgents; -using Unity.MLAgents.Sensors; using Unity.MLAgents.Actuators; +using Unity.MLAgents.Sensors; +using UnityEngine; -[RequireComponent(typeof(MovementController))] -public class NPC : Agent +[RequireComponent(typeof(MovementController), typeof(BufferSensorComponent))] +public class NPC : Agent, ICharacter { - public Team Team { get; set; } - [HideInInspector] - private float LastTimeHit; + private Character AgentCharacter; public CharacterCondition Condition; + private FlagZone flagZone = null; - public MovementController moveController; - - private void Start() + public INpcBaseState NpcState { get; private set; } + public INpcBaseBodyState NpcBodyState { get; private set; } + + public Character GetCharacter => AgentCharacter; + + private NpcDirectPointState DirectState; + private NpcInCoverState CoverState; + private NpcRunningState RunningState; + + private NpcStandingState StandingState; + private NpcCrouchingState CrouchingState; + + private MovementController moveController; + private BufferSensorComponent bufferSensor; + + private Dictionary<int, NavPoint> navPointIdDict; + + #region UnityEvents and ML + private void Awake() { - Condition = new CharacterCondition(); + DirectState = new NpcDirectPointState(); + CoverState = new NpcInCoverState(); + RunningState = new NpcRunningState(); + NpcState = DirectState; + + CrouchingState = new NpcCrouchingState(); + StandingState = new NpcStandingState(); + NpcBodyState = StandingState; + + AgentCharacter = new Character(); + Condition = AgentCharacter.Condition; + moveController = gameObject.GetComponent<MovementController>(); + bufferSensor = gameObject.GetComponent<BufferSensorComponent>(); + + flagZone = GameObject.FindObjectOfType<FlagZone>(); + if (flagZone is null) + Debug.LogError("Flag Is Not Set"); + + navPointIdDict = MapManager.Instance.IDToNavPoint; + if (navPointIdDict is null) + Debug.LogError("Cant Find Nav Point Dictionary"); + } + + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); } public override void OnEpisodeBegin() { - + if (navPointIdDict is null) + Debug.LogError("Cant Find Nav Point Dictionary"); + + NpcState = DirectState; + flagZone = GameObject.FindObjectOfType<FlagZone>(); } public override void CollectObservations(VectorSensor sensor) { - sensor.AddObservation(Condition.HealthPoints); - sensor.AddObservation(Condition.ArmourPoints); - sensor.AddObservation(Condition.Ammunition); - sensor.AddObservation((int)Condition.npcState); - } + // Debug.Log("Collect observations called!"); + navPointIdDict = MapManager.Instance.IDToNavPoint; + if (navPointIdDict is null) + Debug.LogError("Cant Find Nav Point Dictionary"); + var candidates = moveController.GetPointsCandidate(); - public override void Heuristic(in ActionBuffers actionsOut) - { - var discreteActionsOut = actionsOut.DiscreteActions; - if (Input.GetKeyDown(KeyCode.W)) + //common sensors + sensor.AddObservation(GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(), + NpcBodyState.GetPointToHit(gameObject)).ToInt()); + sensor.AddObservation(AgentCharacter.LastTimeHit); + sensor.AddObservation((!flagZone.IsNotOccup).ToInt()); + sensor.AddObservation(Condition.GetHealthPointsInQuantile()); + sensor.AddObservation(Condition.GetArmourPointsInQuantile()); + sensor.AddObservation(candidates.Count); + sensor.AddObservation(moveController.PointStartID); + sensor.AddObservation(moveController.PointEndID); + // Debug.Log("Done common!"); + //state sensors + sensor.AddObservation((int)NpcState.State); + sensor.AddObservation((int)NpcBodyState.State); + sensor.AddObservation(GameManager.IsEnemyNearby(gameObject.transform.position, AgentCharacter.Team)); + sensor.AddObservation(navPointIdDict[moveController.PointStartID].DeathAttr); + sensor.AddObservation(navPointIdDict[moveController.PointEndID].DeathAttr); + sensor.AddObservation(moveController.FlagDistance); + // Debug.Log("Done state sensors!"); + + //point sensors + foreach (var point in candidates) { - discreteActionsOut[0] = 1; + var position = transform.position; + bufferSensor.AppendObservation(new float[] { + point.DeathAttr, + (int)point.navType, + //4 flagEnemyDistance + GameManager.IsCloserToFlagFromNextNavPoint(point, position).ToInt(), + //5 
EnemyVsNavPointDistance + GameManager.IsCloserToEnemyThanToNextNavPoint(point, position, AgentCharacter.Team.GetOppositeTeam()).ToInt(), + //6 Have been seen by enemy in this point + GameManager.IsHaveSeenByEnemy(AgentCharacter.Team.GetOppositeTeam(), + point.Position).ToInt() + }); } + // Debug.Log("Done collect observations!"); } public override void OnActionReceived(ActionBuffers actions) { - if (actions.DiscreteActions[0] == 1) - moveController.MoveToRandomPoint(); + // Debug.Log("Actions recieved!"); + var result = actions.DiscreteActions; + // Debug.Log(result[0] + " " + result[1]); + if (result[0] == 0) + { + if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Cover) + return; + NpcState = CoverState; + + switch (result[1]) + { + case 0: Peek(); break; + case 1: Cover(); break; + case 3: Peek(); moveController.GoToNextNavPoint(navPointIdDict[result[2]]); break; + case 4: NpcState = DirectState; break; + default: throw new ArgumentException("Undefined Action recieved"); + } + } + // Debug.Log(result[0] == 1); + if (result[0] == 1) + { + // Debug.Log("BEFORE SOme shitty if >:("); + if (navPointIdDict[moveController.PointStartID].navType != NavPointType.Direction) + { + // Debug.Log("SOme shitty if >:("); + return; + } + // Debug.Log("FUCK"); + + switch (result[1]) + { + case 0: moveController.GoToNextNavPoint(navPointIdDict[result[2]]); + NpcState = RunningState; Debug.Log("Go to point " + result[2]); break; + case 1: NpcState = DirectState; break; + default: throw new ArgumentException("Undefined Action recieved"); + } + } + + if (result[0] == 2) + { + if (moveController.PointStartID == moveController.PointEndID && moveController.PointEndID != -1) + return; + switch (result[1]) + { + case 0: moveController.StopOnPath(); NpcState = DirectState; break; + case 1: moveController.ReturnToStartPoint(); NpcState = RunningState; break; + default: throw new ArgumentException("Undefined Action recieved"); + } + } + // Debug.Log("Actions processed!"); + } + #endregion + + public event Action<NpcBodyState> OnChangePosition; + private void Peek() + { + OnChangePosition?.Invoke(global::NpcBodyState.Standing); + NpcBodyState = StandingState; } - public event Action<NPC> OnKilledEvent; - public void GetDamage(float damage) + private void Cover() { + OnChangePosition?.Invoke(global::NpcBodyState.Crouching); + NpcBodyState = CrouchingState; + } + + public event Action<int, Team> OnDamageRecieved; + public void GetDamage(int damage) + { + AgentCharacter.LastTimeHit = TimeManager.Instance.CurrentTime; Condition.GiveHealth(-Mathf.RoundToInt(damage * (1 - Condition.ArmourPoints * 0.5f))); Condition.GiveArmour(-Mathf.RoundToInt(Mathf.Sqrt(damage) * 5)); + OnDamageRecieved?.Invoke(damage, AgentCharacter.Team); if (Condition.HealthPoints < 0) - OnKilledEvent?.Invoke(this); + { + MapManager.AddDeathAttributeToPoints(moveController.PointStartID, moveController.PointEndID, + moveController.DistanceToGo, moveController.RemainingDistance); + var pos = gameObject.transform.position; + var id = moveController.PointStartID; + CharacterFactory.Instance.ReSpawn(this, ref pos, ref id); + } + } + + public void ResetCharacter() + { + Condition.Reset(); + EndEpisode(); } } diff --git a/Assets/Scripts/Character/NPC.cs.meta b/Assets/Scripts/Character/NPC.cs.meta index 2ef6c68..232a1c0 100755 --- a/Assets/Scripts/Character/NPC.cs.meta +++ b/Assets/Scripts/Character/NPC.cs.meta @@ -4,7 +4,7 @@ MonoImporter: externalObjects: {} serializedVersion: 2 defaultReferences: [] - executionOrder: 0 + executionOrder: 200 icon: {instanceID: 0} 
userData: assetBundleName: diff --git a/Assets/Scripts/Character/NpcState.cs b/Assets/Scripts/Character/NpcState.cs new file mode 100644 index 0000000..51feda6 --- /dev/null +++ b/Assets/Scripts/Character/NpcState.cs @@ -0,0 +1,68 @@ +using UnityEngine; + +public enum NpcEnumState +{ + InCover, + InDirectPoint, + InRunning, +} + +public enum NpcBodyState +{ + Crouching, + Standing, +} + +public class NpcCrouchingState : INpcBaseBodyState +{ + public NpcBodyState State => NpcBodyState.Crouching; + + public Vector3 GetPointToHit(GameObject go) + { + MeshRenderer meshRenderer; + go.TryGetComponent(out meshRenderer); + return meshRenderer.bounds.center; + } +} + +public class NpcStandingState : INpcBaseBodyState +{ + public NpcBodyState State => NpcBodyState.Standing; + + public Vector3 GetPointToHit(GameObject go) + { + MeshRenderer meshRenderer; + go.TryGetComponent(out meshRenderer); + return meshRenderer.bounds.center; + } +} + +public class NpcDirectPointState : INpcBaseState +{ + public bool InCover => false; + public bool IsRunning => false; + public bool InDirectPoint => false; + public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInDirectPoint; + public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInDirectPoint; + public NpcEnumState State => NpcEnumState.InDirectPoint; +} + +public class NpcRunningState : INpcBaseState +{ + public bool InCover => false; + public bool IsRunning => true; + public bool InDirectPoint => false; + public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInRunning; + public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInRunning; + public NpcEnumState State => NpcEnumState.InRunning; +} + +public class NpcInCoverState : INpcBaseState +{ + public bool InCover => true; + public bool IsRunning => false; + public bool InDirectPoint => false; + public float HitChance => SettingsReader.Instance.GetSettings.GetHitChanceInCover; + public float DoDamageChance => SettingsReader.Instance.GetSettings.DoDamageChanceInCover; + public NpcEnumState State => NpcEnumState.InCover; +} diff --git a/Assets/Scripts/Character/NpcState.cs.meta b/Assets/Scripts/Character/NpcState.cs.meta new file mode 100644 index 0000000..c1483bf --- /dev/null +++ b/Assets/Scripts/Character/NpcState.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a192e433e26797745ad0b46de2586de3 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Scripts/Character/Player.cs b/Assets/Scripts/Character/Player.cs new file mode 100644 index 0000000..593a326 --- /dev/null +++ b/Assets/Scripts/Character/Player.cs @@ -0,0 +1,38 @@ +using System; +using UnityEngine; + +public class Player : MonoBehaviour, ICharacter +{ + [HideInInspector] + public Character PlayerCharacter; + public CharacterCondition Condition; + + public Character GetCharacter => PlayerCharacter; + + private void Awake() + { + PlayerCharacter = new Character(); + Condition = PlayerCharacter.Condition; + } + + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + + public event Action OnKilledEvent; + public void GetDamage(float damage) + { + PlayerCharacter.LastTimeHit = TimeManager.Instance.CurrentTime; + Condition.GiveHealth(-Mathf.RoundToInt(damage * (1 - Condition.ArmourPoints * 0.5f))); + Condition.GiveArmour(-Mathf.RoundToInt(Mathf.Sqrt(damage) * 5)); + + if 
(Condition.HealthPoints < 0) + OnKilledEvent?.Invoke(this); + } + + public void ResetCharacter() + { + Condition = new CharacterCondition(); + } +} diff --git a/Assets/Scripts/Character/Player.cs.meta b/Assets/Scripts/Character/Player.cs.meta new file mode 100644 index 0000000..63af2cb --- /dev/null +++ b/Assets/Scripts/Character/Player.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a8c9a8e604d395c4ab9d03d28adc4982 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Scripts/Character/scr_CharacterController.cs b/Assets/Scripts/Character/scr_CharacterController.cs index 25fc631..6435b98 100755 --- a/Assets/Scripts/Character/scr_CharacterController.cs +++ b/Assets/Scripts/Character/scr_CharacterController.cs @@ -5,7 +5,7 @@ using Unity.Barracuda; using UnityEngine; using UnityEngine.InputSystem; -using static scr_Models; +using static scr_Models; public class scr_CharacterController : MonoBehaviour { @@ -17,7 +17,7 @@ public class scr_CharacterController : MonoBehaviour public Vector2 input_Movement; [HideInInspector] public Vector2 input_View; - + private Vector3 newCameraRotation; private Vector3 newCharacterRotation; @@ -25,14 +25,14 @@ public class scr_CharacterController : MonoBehaviour public Transform cameraHolder; public Transform feetTransform; - [Header("Settings")] + [Header("Settings")] public PlayerSettingsModel playerSettings; public float ViewClampYMin = -70; public float ViewClampYMax = 80; public LayerMask playerMask; - - [Header("Gravity")] + + [Header("Gravity")] public float gravityAmount; public float gravityMin; private float playerGravity; @@ -40,14 +40,14 @@ public class scr_CharacterController : MonoBehaviour public Vector3 jumpingForce; private Vector3 jumpingForceVelocity; - [Header("Stance")] + [Header("Stance")] public PlayerStance playerStance; public float playerStanceSmoothing; public CharacterStance playerStandStance; public CharacterStance playerCrouchStance; public CharacterStance playerProneStance; private float stanceCheckErrorMargin = 0.05f; - + private float cameraHeight; private float cameraHeightVelocity; @@ -77,13 +77,13 @@ public class scr_CharacterController : MonoBehaviour defaultInput.Character.Movement.performed += e => input_Movement = e.ReadValue(); defaultInput.Character.View.performed += e => input_View = e.ReadValue(); defaultInput.Character.Jump.performed += e => Jump(); - + defaultInput.Character.Crouch.performed += e => Crouch(); defaultInput.Character.Prone.performed += e => Prone(); - + defaultInput.Character.Sprint.performed += e => ToggleSprint(); defaultInput.Character.SprintReleased.performed += e => StopSprint(); - + defaultInput.Enable(); newCameraRotation = cameraHolder.localRotation.eulerAngles; @@ -134,10 +134,10 @@ public class scr_CharacterController : MonoBehaviour { newCharacterRotation.y += playerSettings.ViewXSensetivity * (playerSettings.ViewXInverted ? -input_View.x : input_View.x) * Time.deltaTime; transform.localRotation = Quaternion.Euler(newCharacterRotation); - + newCameraRotation.x += playerSettings.ViewYSensetivity * (playerSettings.ViewYInverted ? 
input_View.y : -input_View.y) * Time.deltaTime; newCameraRotation.x = Mathf.Clamp(newCameraRotation.x, ViewClampYMin, ViewClampYMax); - + cameraHolder.localRotation = Quaternion.Euler(newCameraRotation); } @@ -159,18 +159,18 @@ public class scr_CharacterController : MonoBehaviour verticalSpeed = playerSettings.RunningForwardSpeed; horizontalSpeed = playerSettings.RunningStrafeSpeed; } - + // Effectors if (!characterController.isGrounded) { playerSettings.SpeedEffector = playerSettings.FallingSpeedEffector; } - else if(playerStance == PlayerStance.Crouch) + else if (playerStance == PlayerStance.Crouch) { playerSettings.SpeedEffector = playerSettings.CrouchSpeedEffector; - } - else if(playerStance == PlayerStance.Prone) + } + else if (playerStance == PlayerStance.Prone) { playerSettings.SpeedEffector = playerSettings.ProneSpeedEffector; } @@ -188,12 +188,12 @@ public class scr_CharacterController : MonoBehaviour verticalSpeed *= playerSettings.SpeedEffector; horizontalSpeed *= playerSettings.SpeedEffector; - + newMovementSpeed = Vector3.SmoothDamp(newMovementSpeed, new Vector3(horizontalSpeed * input_Movement.x * Time.deltaTime, 0, verticalSpeed * input_Movement.y * Time.deltaTime), ref newMovementSpeedVelocity, characterController.isGrounded ? playerSettings.MovementSmoothing : playerSettings.FallingSmoothing); - + var MovementSpeed = transform.TransformDirection(newMovementSpeed); if (playerGravity > gravityMin) @@ -208,7 +208,7 @@ public class scr_CharacterController : MonoBehaviour MovementSpeed.y += playerGravity; MovementSpeed += jumpingForce * Time.deltaTime; - + characterController.Move(MovementSpeed); } @@ -229,7 +229,7 @@ public class scr_CharacterController : MonoBehaviour { stanceHeight = playerProneStance.CameraHeight; } - + cameraHeight = Mathf.SmoothDamp(cameraHolder.localPosition.y, stanceHeight, ref cameraHeightVelocity, playerStanceSmoothing); cameraHolder.localPosition = new Vector3(cameraHolder.localPosition.x, cameraHeight, cameraHolder.localPosition.z); @@ -240,7 +240,7 @@ public class scr_CharacterController : MonoBehaviour { return; } - + if (playerStance == PlayerStance.Crouch) { if (StanceCheck(playerStandStance.StanceCollider.height)) @@ -250,7 +250,7 @@ public class scr_CharacterController : MonoBehaviour playerStance = PlayerStance.Stand; return; } - + // Jump jumpingForce = Vector3.up * playerSettings.JumpingHeight; playerGravity = 0; @@ -283,8 +283,8 @@ public class scr_CharacterController : MonoBehaviour { var start = new Vector3(feetTransform.position.x, feetTransform.position.y + characterController.radius + stanceCheckErrorMargin, feetTransform.position.z); var end = new Vector3(feetTransform.position.x, feetTransform.position.y - characterController.radius - stanceCheckErrorMargin + stanceCheckheight, feetTransform.position.z); - - + + return Physics.CheckCapsule(start, end, characterController.radius, playerMask); } @@ -297,7 +297,7 @@ public class scr_CharacterController : MonoBehaviour } isSprinting = !isSprinting; } - + private void StopSprint() { if (playerSettings.SprintingHold) @@ -305,5 +305,5 @@ public class scr_CharacterController : MonoBehaviour isSprinting = false; } } - + } diff --git a/Assets/Scripts/Character/scr_Models.cs b/Assets/Scripts/Character/scr_Models.cs index 018856b..7be8e25 100755 --- a/Assets/Scripts/Character/scr_Models.cs +++ b/Assets/Scripts/Character/scr_Models.cs @@ -1,62 +1,45 @@ -using System; +using System; using UnityEngine; public static class scr_Models { - #region Player - - public enum PlayerStance - { - Stand, - 
Crouch, - Prone - } - - [Serializable] - public class PlayerSettingsModel - { - [Header("View Settings")] - public float ViewXSensetivity; - public float ViewYSensetivity; + #region Player - public bool ViewXInverted; - public bool ViewYInverted; + public enum PlayerStance + { + Stand, + Crouch, + Prone + } - [Header("Movement Settings")] - public bool SprintingHold; - public float MovementSmoothing; - - [Header("Movement - Running")] - public float RunningForwardSpeed; - public float RunningStrafeSpeed; - - [Header("Movement - Walking")] - public float WalkingForwardSpeed; - public float WalkingBackwardSpeed; - public float WalkingStrafeSpeed; + [Serializable] + public class PlayerSettingsModel + { + [Header("View Settings")] + public float ViewXSensetivity; + public float ViewYSensetivity; - [Header("Jumping")] - public float JumpingHeight; - public float JumpingFalloff; - public float FallingSmoothing; + public bool ViewXInverted; + public bool ViewYInverted; - [Header("Speed Effectors")] - public float SpeedEffector = 1; - public float CrouchSpeedEffector; - public float ProneSpeedEffector; - public float FallingSpeedEffector; - } + [Header("Movement Settings")] + public bool SprintingHold; + public float MovementSmoothing; - [Serializable] - public class CharacterStance - { - public float CameraHeight; - public CapsuleCollider StanceCollider; - } + [Header("Movement - Running")] + public float RunningForwardSpeed; + public float RunningStrafeSpeed; - #endregion + [Header("Movement - Walking")] + public float WalkingForwardSpeed; + public float WalkingBackwardSpeed; + public float WalkingStrafeSpeed; - #region - Weapons - + [Header("Jumping")] + public float JumpingHeight; + public float JumpingFalloff; + public float FallingSmoothing; + } [Serializable] public class WeaponSettingsModel @@ -78,5 +61,29 @@ public static class scr_Models public float MovementSwaySmoothing; } - #endregion + [Serializable] + public class CharacterStance + { + public float CameraHeight; + public CapsuleCollider StanceCollider; + } + + #endregion + + #region - Weapons - + + [Serializable] + public class WeaponSettingsModel + { + [Header("Sway")] + public float SwayAmount; + public bool SwayYInverted; + public bool SwayXInverted; + public float SwaySmoothing; + public float SwayResetSmoothing; + public float SwayClampX; + public float SwayClampY; + } + + #endregion } diff --git a/Assets/Scripts/Managers/GameManager.cs b/Assets/Scripts/Managers/GameManager.cs index 613ad1e..431fd81 100755 --- a/Assets/Scripts/Managers/GameManager.cs +++ b/Assets/Scripts/Managers/GameManager.cs @@ -1,57 +1,150 @@ -using System.Collections; -using System.Collections.Generic; -using UnityEditorInternal; +using System; +using Unity.MLAgents; using UnityEngine; public class GameManager : MonoBehaviour { - private static GameManager instance; - public static GameManager Instance { get { return instance; } } + public static GameManager Instance => instance; + + private static SimpleMultiAgentGroup defendersTeam = new SimpleMultiAgentGroup(); + private static SimpleMultiAgentGroup attackersTeam = new SimpleMultiAgentGroup(); private void Awake() { - if (Instance == null) + if (instance is null) instance = this; - else if (Instance == this) + else + { Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } } private void Start() { - GlobalEventManager.onCaptureFlag += flagCaptured; - GlobalEventManager.onTimeLeft += timeOut; - } + Academy.Instance.OnEnvironmentReset += ResetScene; + GlobalEventManager.OnCaptureFlag += 
FlagCaptured; + GlobalEventManager.OnTimeLeft += TimeOut; - private void Update() - { - - } - - private void flagCaptured(Team team) - { - switch(team) + var agents = GameObject.FindObjectsOfType(); + foreach (var item in agents) { - case Team.Attackers: - Debug.Log("Attackers Win"); - break; - case Team.Defenders: - Debug.Log("Defenders Win"); - break; - default: - Debug.LogError("Unexpected Team"); - break; + var agent = item as NPC; + if (agent.GetCharacter.Team == Team.Attackers) + attackersTeam.RegisterAgent(item); + else + defendersTeam.RegisterAgent(item); } } - private void timeOut() + private static SimpleMultiAgentGroup getAgentList(Team team) { - Debug.Log("Time is out"); + if (team == Team.Attackers) + return attackersTeam; + else + return defendersTeam; + } + + public static bool IsCloserToEnemyThanToNextNavPoint(NavPoint navPoint, Vector3 currentTransform, Team oppositeTeam) + { + var agentGroup = getAgentList(oppositeTeam); + + var distToNavPoint = (currentTransform - navPoint.Position).magnitude; + foreach (var agent in agentGroup.GetRegisteredAgents()) + if (distToNavPoint > (currentTransform - agent.transform.position).magnitude) + return true; + if ((SettingsReader.Instance.GetSettings.HasHumanAttacker == true && oppositeTeam == Team.Attackers) || + (SettingsReader.Instance.GetSettings.HasHumanDefender == true && oppositeTeam == Team.Defenders)) + { + if (distToNavPoint > (currentTransform - CharacterFactory.Instance.player.transform.position).magnitude) + return true; + } + return false; + } + + public static bool IsEnemyNearby(Vector3 currentTransform, Team oppositeTeam) + { + var agentGroup = getAgentList(oppositeTeam); + + foreach (var agent in agentGroup.GetRegisteredAgents()) + if ((currentTransform - agent.transform.position).magnitude < SettingsReader.Instance.GetSettings.ViewDistance) + return true; + if ((SettingsReader.Instance.GetSettings.HasHumanAttacker == true && oppositeTeam == Team.Attackers) || + (SettingsReader.Instance.GetSettings.HasHumanDefender == true && oppositeTeam == Team.Defenders)) + { + if ((currentTransform - CharacterFactory.Instance.player.transform.position).magnitude < SettingsReader.Instance.GetSettings.ViewDistance) + return true; + } + return false; + } + + public static bool IsCloserToFlagFromNextNavPoint(NavPoint navPoint, Vector3 currentTransform) + => navPoint.FlagDistance < (currentTransform - GameObject.FindGameObjectWithTag("Flag").transform.position).magnitude; + + public static bool IsHaveSeenByEnemy(Team oppositeTeam, Vector3 position) + { + var agentGroup = getAgentList(oppositeTeam); + RaycastHit rayHit = new RaycastHit(); + foreach (var agent in agentGroup.GetRegisteredAgents() ) + { + var npc = agent as NPC; + if (Physics.Raycast(position, + (npc.NpcBodyState.GetPointToHit(npc.gameObject) - position).normalized, + out rayHit, + SettingsReader.Instance.GetSettings.ViewDistance)) + { + if (rayHit.collider.gameObject.GetComponent() != null) + return true; + } + } + if ((SettingsReader.Instance.GetSettings.HasHumanAttacker == true && oppositeTeam == Team.Attackers) || + (SettingsReader.Instance.GetSettings.HasHumanDefender == true && oppositeTeam == Team.Defenders)) + { + var player = CharacterFactory.Instance.player; + if (Physics.Raycast(position, + (player.GetComponent().bounds.center - position).normalized, + out rayHit, + SettingsReader.Instance.GetSettings.ViewDistance)) + { + if (rayHit.collider.gameObject.GetComponent() != null) + return true; + } + } + return false; + } + + private void FlagCaptured(Team team) + { 
+ switch (team) + { + case Team.Attackers: + Debug.Log("Attackers Win"); + ResetScene(); + break; + case Team.Defenders: + Debug.Log("Defenders Win"); + ResetScene(); + break; + } + ResetScene(); + } + + private void TimeOut() + { + ResetScene(); } private void OnDestroy() { - GlobalEventManager.onCaptureFlag -= flagCaptured; - GlobalEventManager.onTimeLeft -= timeOut; + GlobalEventManager.OnCaptureFlag -= FlagCaptured; + GlobalEventManager.OnTimeLeft -= TimeOut; + } + + public static event Action OnResetScene; + private void ResetScene() + { + Debug.Log("Scene Reset"); + OnResetScene?.Invoke(); } } diff --git a/Assets/Scripts/Managers/GlobalEventManager.cs b/Assets/Scripts/Managers/GlobalEventManager.cs index e0461af..753b7d2 100755 --- a/Assets/Scripts/Managers/GlobalEventManager.cs +++ b/Assets/Scripts/Managers/GlobalEventManager.cs @@ -2,18 +2,18 @@ public class GlobalEventManager { - public static event Action<Team> onCaptureFlag; + public static event Action<Team> OnCaptureFlag; public static void SendCaptureFlag(Team team) { - onCaptureFlag?.Invoke(team); - onCaptureFlag = null; + OnCaptureFlag?.Invoke(team); + OnCaptureFlag = null; } - public static event Action onTimeLeft; + public static event Action OnTimeLeft; public static void SendTimeout() { - onTimeLeft?.Invoke(); - onTimeLeft = null; + OnTimeLeft?.Invoke(); + OnTimeLeft = null; } } diff --git a/Assets/Scripts/Managers/MapManager.cs b/Assets/Scripts/Managers/MapManager.cs index 4a9451b..97a0c75 100755 --- a/Assets/Scripts/Managers/MapManager.cs +++ b/Assets/Scripts/Managers/MapManager.cs @@ -3,15 +3,61 @@ using UnityEngine; public class MapManager : MonoBehaviour { - public static List<NavPoint> navPoints { get; private set; } - private void Start() + private static MapManager _instance; + public static MapManager Instance => _instance; + [SerializeField] private List<NavPoint> _navPoints; + public List<NavPoint> NavPoints { get => _navPoints; private set => _navPoints = value; } + public Dictionary<int, NavPoint> IDToNavPoint { get; private set; } + + private void Awake() { - navPoints = new List<NavPoint>(); - var navPointsGameObj = GameObject.FindGameObjectsWithTag("Point"); - foreach (var gameobj in navPointsGameObj) + if (_instance is null) + _instance = this; + else { - Debug.Log(" a "); - navPoints.Add(gameobj.GetComponent<NavPoint>()); + Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } + + NavPoints = new List<NavPoint>(); + var navPointSet = GameObject.Find("NavPoint Set"); + var count = navPointSet.transform.childCount; + for (var i=0; i < count; i++) + NavPoints.Add(navPointSet.transform.GetChild(i) + .gameObject.GetComponent<NavPoint>()); + print(NavPoints.Count); + NavPointSetToID(); + } + + + private void NavPointSetToID() + { + IDToNavPoint = new Dictionary<int, NavPoint>(); + int i = 0; + foreach (var navPoint in NavPoints) + { + IDToNavPoint.Add(i, navPoint); + navPoint.PointId = i; + i++; } } + + public static void AddDeathAttributeToPoints(int startPoint, int endPoint, + float allDistance, float remainingDistance) + { + var startNavPoint = _instance.IDToNavPoint[startPoint]; + var endNavPoint = _instance.IDToNavPoint[endPoint]; + float coef; + try + { + coef = remainingDistance / allDistance; + } + catch (System.ArithmeticException) + { + Debug.LogError("Path Length is zero"); + return; + } + startNavPoint.DeathAttr += 1 - coef; + endNavPoint.DeathAttr += coef; + } } diff --git a/Assets/Scripts/Managers/TimeManager.cs b/Assets/Scripts/Managers/TimeManager.cs index afdbad8..4aa32a1 100755 --- a/Assets/Scripts/Managers/TimeManager.cs +++ b/Assets/Scripts/Managers/TimeManager.cs @@ -1,11 +1,11 @@ -using 
System.Collections; -using System.Collections.Generic; -using UnityEngine; +using UnityEngine; public class TimeManager : MonoBehaviour { - public static TimeManager instance = null; - public float CurrentTime; + private static TimeManager instance; + public static TimeManager Instance { get { return instance; } } + + public float CurrentTime { get; private set; } void Start() { if (instance == null) @@ -15,12 +15,14 @@ public class TimeManager : MonoBehaviour } else { - Debug.LogError("Only one Instance"); + Debug.LogError("Only 1 Instance"); Destroy(gameObject); } } void Update() { CurrentTime += Time.deltaTime; + if (CurrentTime > SettingsReader.Instance.GetSettings.TimeOut) + GlobalEventManager.SendTimeout(); } } diff --git a/Assets/Scripts/Misc/FlagZone.cs b/Assets/Scripts/Misc/FlagZone.cs index 8cd4ab4..c66d5bb 100755 --- a/Assets/Scripts/Misc/FlagZone.cs +++ b/Assets/Scripts/Misc/FlagZone.cs @@ -1,6 +1,4 @@ -using System.Collections; -using System.Collections.Generic; -using UnityEngine; +using UnityEngine; public class FlagZone : MonoBehaviour { @@ -9,14 +7,14 @@ public class FlagZone : MonoBehaviour public float TimeStayDefenders { get; private set; } private int occupDefenders; private int occupAttackers; - private bool isOccupBoth => (occupDefenders>0) && (occupAttackers>0); - private bool isNotOccup => (occupDefenders == 0) && (occupAttackers == 0); + public bool IsOccupBoth => (occupDefenders > 0) && (occupAttackers > 0); + public bool IsNotOccup => (occupDefenders == 0) && (occupAttackers == 0); private float timeForWin; private void Start() { - timeForWin = SettingsReader.Instance.GetSettings.timeToWin; + timeForWin = SettingsReader.Instance.GetSettings.TimeToWin; TimeStayAttackers = 0; TimeStayDefenders = 0; occupAttackers = 0; @@ -24,7 +22,7 @@ public class FlagZone : MonoBehaviour } private void OnTriggerEnter(Collider other) { - switch(other.tag) + switch (other.tag) { case "Defender": occupDefenders++; @@ -54,7 +52,7 @@ public class FlagZone : MonoBehaviour } private void Update() { - if (isOccupBoth || isNotOccup) + if (IsOccupBoth || IsNotOccup) { TimeStayAttackers = 0; TimeStayDefenders = 0; @@ -64,7 +62,7 @@ public class FlagZone : MonoBehaviour { TimeStayAttackers += Time.deltaTime; if (TimeStayAttackers > timeForWin) - GlobalEventManager.SendCaptureFlag(Team.Attackers); + GlobalEventManager.SendCaptureFlag(Team.Attackers); } else { diff --git a/Assets/Scripts/Misc/NavPoint.cs b/Assets/Scripts/Misc/NavPoint.cs index 75066c0..a70fd61 100755 --- a/Assets/Scripts/Misc/NavPoint.cs +++ b/Assets/Scripts/Misc/NavPoint.cs @@ -1,27 +1,28 @@ -using System.Collections; -using System.Collections.Generic; +using System.Collections.Generic; +using System; using UnityEngine; +public enum NavPointType +{ + Cover, + Direction, +} + + public class NavPoint : MonoBehaviour { - public Vector3 position => gameObject.transform.position; + public Vector3 Position => gameObject.transform.position; public float FlagDistance { get; private set; } - [System.NonSerialized] public float DeathAttr; - [System.NonSerialized] public List EnemiesSeen; - //Here other attributes; - [SerializeField] - public int PointId; + public NavPointType navType = NavPointType.Direction; - private void Awake() - { - //DO NOT DELETE - } + [HideInInspector] + public int PointId = 0; + public float DeathAttr = 0; + public List EnemiesSeen = new List(); private void Start() { - FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - position).magnitude; - EnemiesSeen = new List(); - DeathAttr = 
0; + FlagDistance = (GameObject.FindGameObjectWithTag("Flag").transform.position - Position).magnitude; } } diff --git a/Assets/Scripts/Misc/Settings.cs b/Assets/Scripts/Misc/Settings.cs index a1e07c3..4e333fc 100755 --- a/Assets/Scripts/Misc/Settings.cs +++ b/Assets/Scripts/Misc/Settings.cs @@ -1,30 +1,41 @@ using UnityEngine; -[CreateAssetMenu(fileName ="Game Settings", menuName = "Game/Settings", order = 51)] +[CreateAssetMenu(fileName = "Game Settings", menuName = "Game/Settings", order = 51)] public class Settings : ScriptableObject { - public bool isTesting; + public bool IsTesting; - public float timeToWin; - public float timeOut; + public float TimeToWin; + public float TimeOut; [Header("movement")] - public float movementDistance; - public float movementSpeed; + public float MovementDistance; + public float MovementSpeed; - public TypeAI defTeamAI; - public TypeAI atcTeamAI; - public int numOfDefenders; - public int numOfAttackers; - public bool hasHumanDefender; - public bool hasHumanAttacker; + public TypeAI DefTeamAI; + public TypeAI AtcTeamAI; + public int NumOfDefenders; + public int NumOfAttackers; + public bool HasHumanDefender; + public bool HasHumanAttacker; - public int healthPickupAmount; - public int armourPickupAmount; - public int ammunitionPickupAmount; - public int pickupsAmount; + public int HealthPickupAmount; + public int ArmourPickupAmount; + public int AmmunitionPickupAmount; + public int PickupsAmount; - public int maxHealth; - public int maxArmour; - public int maxAmmo; + public int MaxHealth; + public int MaxArmour; + public int MaxAmmo; + + public float ViewDistance; + + public float GetHitChanceInDirectPoint; + public float GetHitChanceInRunning; + public float GetHitChanceInCover; + public float DoDamageChanceInDirectPoint; + public float DoDamageChanceInRunning; + public float DoDamageChanceInCover; + + public float CrouchingCoefficient; } diff --git a/Assets/Scripts/Misc/SettingsReader.cs b/Assets/Scripts/Misc/SettingsReader.cs index ab60b87..9e709ad 100755 --- a/Assets/Scripts/Misc/SettingsReader.cs +++ b/Assets/Scripts/Misc/SettingsReader.cs @@ -1,17 +1,21 @@ -using System.Collections; -using System.Collections.Generic; -using UnityEngine; +using UnityEngine; public class SettingsReader : MonoBehaviour { private static SettingsReader instance; - public static SettingsReader Instance { get { return instance; } } + public static SettingsReader Instance => instance; private void Awake() { - instance = this; + if (instance is null) + instance = this; + else + { + Destroy(gameObject); + Debug.LogError("Only 1 Instance"); + } } [SerializeField] private Settings gameSettings; - public Settings GetSettings { get { return gameSettings; } } + public Settings GetSettings => gameSettings; } diff --git a/Assets/Scripts/Misc/Statistics.cs b/Assets/Scripts/Misc/Statistics.cs deleted file mode 100755 index 98be783..0000000 --- a/Assets/Scripts/Misc/Statistics.cs +++ /dev/null @@ -1,9 +0,0 @@ -using UnityEngine; - -public class Statistics : MonoBehaviour -{ - private void Start() - { - - } -} diff --git a/Assets/Scripts/Pickups/AmmoPickUp.cs b/Assets/Scripts/Pickups/AmmoPickUp.cs index dffb264..e7af1ce 100755 --- a/Assets/Scripts/Pickups/AmmoPickUp.cs +++ b/Assets/Scripts/Pickups/AmmoPickUp.cs @@ -1,5 +1,4 @@ -using System; -using UnityEngine; +using UnityEngine; [RequireComponent(typeof(BoxCollider))] public class AmmoPickUp : MonoBehaviour, IPickable @@ -11,9 +10,14 @@ public class AmmoPickUp : MonoBehaviour, IPickable PickObject(other.gameObject); } + private void 
OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + public void PickObject(GameObject obj) { - obj.GetComponent()?.TakeAmmo(SettingsReader.Instance.GetSettings.ammunitionPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.TakeAmmo(SettingsReader.Instance.GetSettings.AmmunitionPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/ArmourPickUp.cs b/Assets/Scripts/Pickups/ArmourPickUp.cs index 075896c..b5303f6 100755 --- a/Assets/Scripts/Pickups/ArmourPickUp.cs +++ b/Assets/Scripts/Pickups/ArmourPickUp.cs @@ -1,5 +1,4 @@ -using System; -using UnityEngine; +using UnityEngine; [RequireComponent(typeof(BoxCollider))] public class ArmourPickUp : MonoBehaviour, IPickable @@ -11,9 +10,14 @@ public class ArmourPickUp : MonoBehaviour, IPickable PickObject(other.gameObject); } + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + public void PickObject(GameObject obj) { - obj.GetComponent()?.GiveArmour(SettingsReader.Instance.GetSettings.armourPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.GiveArmour(SettingsReader.Instance.GetSettings.ArmourPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/HealthPickUp.cs b/Assets/Scripts/Pickups/HealthPickUp.cs index a3fffb4..ba8d136 100755 --- a/Assets/Scripts/Pickups/HealthPickUp.cs +++ b/Assets/Scripts/Pickups/HealthPickUp.cs @@ -1,5 +1,4 @@ -using System; -using UnityEngine; +using UnityEngine; [RequireComponent(typeof(BoxCollider))] public class HealthPickUp : MonoBehaviour, IPickable @@ -11,9 +10,14 @@ public class HealthPickUp : MonoBehaviour, IPickable PickObject(other.gameObject); } + private void OnDestroy() + { + Debug.LogWarning("Pooled object was destroyed"); + } + public void PickObject(GameObject obj) { - obj.GetComponent()?.GiveHealth(SettingsReader.Instance.GetSettings.healthPickupAmount); + obj.GetComponent()?.GetCharacter.Condition.GiveHealth(SettingsReader.Instance.GetSettings.HealthPickupAmount); gameObject.SetActive(false); } } diff --git a/Assets/Scripts/Pickups/IPickable.cs b/Assets/Scripts/Pickups/IPickable.cs index fb218d0..37cced3 100755 --- a/Assets/Scripts/Pickups/IPickable.cs +++ b/Assets/Scripts/Pickups/IPickable.cs @@ -1,7 +1,6 @@ -using System; -using UnityEngine; +using UnityEngine; public interface IPickable { - PickUpType type { get; } + PickUpType type { get; } void PickObject(GameObject obj); } \ No newline at end of file diff --git a/Assets/Scripts/Pickups/PickUpSpawner.cs b/Assets/Scripts/Pickups/PickUpSpawner.cs index 8de3d31..f9c320b 100755 --- a/Assets/Scripts/Pickups/PickUpSpawner.cs +++ b/Assets/Scripts/Pickups/PickUpSpawner.cs @@ -16,10 +16,18 @@ public class PickUpSpawner : MonoBehaviour [SerializeField] private List spawnPoints; + private void Awake() + { + if (instance == null) + instance = this; + else + Destroy(gameObject); + } + private void Start() { pickups = new List(); - var amount = SettingsReader.Instance.GetSettings.pickupsAmount; + var amount = SettingsReader.Instance.GetSettings.PickupsAmount; for (int i = 0; i < amount; i++) pickups.Add(GameObject.Instantiate(healthPrefab, spawnPoints[Random.Range(0, spawnPoints.Count)].transform.position, Quaternion.identity)); for (int i = 0; i < amount; i++) @@ -36,25 +44,25 @@ public class PickUpSpawner : MonoBehaviour private IEnumerator SpawnNewPickUps() { - while(true) + while (true) { GameObject item; - if(IsDisableCheck(out item)) + if (IsDisableCheck(out item)) { yield return new WaitForSeconds(3); if (item != null) { - 
item.transform.position = spawnPoints[Random.Range(0, spawnPoints.Count)].position; + item.transform.position = spawnPoints[Random.Range(0, spawnPoints.Count)].Position; item.SetActive(true); } } - yield return new WaitForSeconds(2); + yield return new WaitForSeconds(2); } } private bool IsDisableCheck(out GameObject gameobj) { - foreach(var pick in pickups) + foreach (var pick in pickups) { if (!pick.activeInHierarchy) { diff --git a/Assets/Scripts/Sensors/SensorType.cs b/Assets/Scripts/Sensors/SensorType.cs deleted file mode 100755 index 8d85790..0000000 --- a/Assets/Scripts/Sensors/SensorType.cs +++ /dev/null @@ -1,6 +0,0 @@ -public enum SensorType -{ - Visual, - Sound, - Other -} diff --git a/Assets/Scripts/Sensors/Sensors.cs b/Assets/Scripts/Sensors/Sensors.cs deleted file mode 100644 index b38d85f..0000000 --- a/Assets/Scripts/Sensors/Sensors.cs +++ /dev/null @@ -1,4 +0,0 @@ -using System.Collections.Generic; -using Unity.MLAgents.Sensors; - - diff --git a/Assets/Prefabs/DragonFucker.prefab.meta b/Assets/Scripts/Statistics.meta similarity index 57% rename from Assets/Prefabs/DragonFucker.prefab.meta rename to Assets/Scripts/Statistics.meta index 1e8b307..4e72120 100644 --- a/Assets/Prefabs/DragonFucker.prefab.meta +++ b/Assets/Scripts/Statistics.meta @@ -1,6 +1,7 @@ fileFormatVersion: 2 -guid: c886079c5bf3e67408d356ea1a932c5f -PrefabImporter: +guid: 3a9f7f0a9faf11f49a433480722bffc5 +folderAsset: yes +DefaultImporter: externalObjects: {} userData: assetBundleName: diff --git a/Assets/Scripts/Statistics/Logger.cs b/Assets/Scripts/Statistics/Logger.cs new file mode 100644 index 0000000..df7ce3d --- /dev/null +++ b/Assets/Scripts/Statistics/Logger.cs @@ -0,0 +1,19 @@ +using System.IO; +using UnityEngine; + +public class Logger +{ + private const string Directory = "/Logs/"; + private const string BaseName = "Log#"; + + public static void SaveLog(T objToSerialize) + { + var dir = Application.persistentDataPath + Directory; + if (!System.IO.Directory.Exists(dir)) + System.IO.Directory.CreateDirectory(dir); + + var logName = BaseName + (System.IO.Directory.GetFiles(dir).Length + 1).ToString(); + var json = JsonUtility.ToJson(objToSerialize); + File.WriteAllText(dir + logName, json); + } +} \ No newline at end of file diff --git a/Assets/Scripts/Statistics/Logger.cs.meta b/Assets/Scripts/Statistics/Logger.cs.meta new file mode 100644 index 0000000..e455173 --- /dev/null +++ b/Assets/Scripts/Statistics/Logger.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b3a1cec894fa98b4bbe20470f1e316c4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Scripts/Statistics/StatisticManager.cs b/Assets/Scripts/Statistics/StatisticManager.cs new file mode 100644 index 0000000..a9c4883 --- /dev/null +++ b/Assets/Scripts/Statistics/StatisticManager.cs @@ -0,0 +1,51 @@ +using UnityEngine; + +internal class Log +{ + public int damageTakenByDefs = 0; + public int damageTakenByAtc = 0; + + public int AtcWin = 0; + public int DefWin = 0; + + public int TimeOuts = 0; +} + +public class StatisticManager : MonoBehaviour +{ + private Log log = new Log(); + private void Awake() + { + foreach (var npc in GameObject.FindObjectsOfType()) + npc.OnDamageRecieved += RegisterDamage; + + GlobalEventManager.OnCaptureFlag += RegisterWin; + GlobalEventManager.OnTimeLeft += RegisterTimeOut; + } + + private void RegisterDamage(int damage, Team team) + { + if (team == 
Team.Attackers) + log.damageTakenByAtc += damage; + else + log.damageTakenByDefs += damage; + } + + private void RegisterWin(Team team) + { + if (team == Team.Attackers) + log.AtcWin += 1; + else + log.DefWin += 1; + } + + private void RegisterTimeOut() + { + log.TimeOuts += 1; + } + + private void OnApplicationQuit() + { + Logger.SaveLog(log); + } +} diff --git a/Assets/Scripts/Misc/Statistics.cs.meta b/Assets/Scripts/Statistics/StatisticManager.cs.meta old mode 100755 new mode 100644 similarity index 100% rename from Assets/Scripts/Misc/Statistics.cs.meta rename to Assets/Scripts/Statistics/StatisticManager.cs.meta diff --git a/Assets/Scripts/Utils/BoolToInteger.cs b/Assets/Scripts/Utils/BoolToInteger.cs new file mode 100644 index 0000000..0d8090a --- /dev/null +++ b/Assets/Scripts/Utils/BoolToInteger.cs @@ -0,0 +1,7 @@ +public static class BoolExtension +{ + public static int ToInt(this bool _bool) + { + return _bool == true ? 1 : 0; + } +} \ No newline at end of file diff --git a/Assets/Scripts/Utils/BoolToInteger.cs.meta b/Assets/Scripts/Utils/BoolToInteger.cs.meta new file mode 100644 index 0000000..3688775 --- /dev/null +++ b/Assets/Scripts/Utils/BoolToInteger.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f48fff3c2eda14d4fba923fe8875f651 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/Scripts/Utils/SerializableDictionary.cs b/Assets/Scripts/Utils/SerializableDictionary.cs index c0877b9..8cb258e 100755 --- a/Assets/Scripts/Utils/SerializableDictionary.cs +++ b/Assets/Scripts/Utils/SerializableDictionary.cs @@ -1,10 +1,10 @@ using System; -using System.Linq; using System.Collections; using System.Collections.Generic; using System.Diagnostics; -using UnityEngine; +using System.Linq; using UnityEditor; +using UnityEngine; using UnityObject = UnityEngine.Object; [Serializable, DebuggerDisplay("Count = {Count}")] diff --git a/Assets/Scripts/Weapons/scr_WeaponController.cs b/Assets/Scripts/Weapons/scr_WeaponController.cs index 84e51e1..8e5149a 100755 --- a/Assets/Scripts/Weapons/scr_WeaponController.cs +++ b/Assets/Scripts/Weapons/scr_WeaponController.cs @@ -1,4 +1,4 @@ -using System; +using System; using UnityEditor; using UnityEngine; using UnityEngine.InputSystem; @@ -7,7 +7,7 @@ using static scr_Models; public class scr_WeaponController : MonoBehaviour { private scr_CharacterController characterController; - [Header("Settings")] + [Header("Settings")] public WeaponSettingsModel settings; [Header("References")] @@ -52,9 +52,9 @@ public class scr_WeaponController : MonoBehaviour weaponAnimator.speed = characterController.weaponAnimationSpeed; targetWeaponRotation.y += settings.SwayAmount * (settings.SwayXInverted ? -characterController.input_View.x : characterController.input_View.x) * Time.deltaTime; - targetWeaponRotation.x += settings.SwayAmount * (settings.SwayYInverted ? characterController.input_View.y : -characterController.input_View.y) * Time.deltaTime; + targetWeaponRotation.x += settings.SwayAmount * (settings.SwayYInverted ? 
characterController.input_View.y : -characterController.input_View.y) * Time.deltaTime; //newWeaponRotation.x = Mathf.Clamp(newWeaponRotation.x, ViewClampYMin, ViewClampYMax); - + targetWeaponRotation.x = Mathf.Clamp(targetWeaponRotation.x, -settings.SwayClampX, settings.SwayClampX); targetWeaponRotation.y = Mathf.Clamp(targetWeaponRotation.y, -settings.SwayClampY, settings.SwayClampY); @@ -77,4 +77,4 @@ public class scr_WeaponController : MonoBehaviour { weaponAnimator.SetBool("isSprinting", characterController.isSprinting); } -} +} \ No newline at end of file diff --git a/Assets/Settings/Game Settings.asset b/Assets/Settings/Game Settings.asset old mode 100755 new mode 100644 index 3652c35..fc07488 --- a/Assets/Settings/Game Settings.asset +++ b/Assets/Settings/Game Settings.asset @@ -12,21 +12,29 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: e2c47233b9062c84482336b145c6891b, type: 3} m_Name: Game Settings m_EditorClassIdentifier: - isTesting: 1 - timeToWin: 5 - timeOut: 1600 - movementDistance: 50 - movementSpeed: 3 - defTeamAI: 0 - atcTeamAI: 0 - numOfDefenders: 0 - numOfAttackers: 0 - hasHumanDefender: 0 - hasHumanAttacker: 0 - healthPickupAmount: 50 - armourPickupAmount: 50 - ammunitionPickupAmount: 120 - pickupsAmount: 2 - maxHealth: 0 - maxArmour: 0 - maxAmmo: 0 + IsTesting: 0 + TimeToWin: 15 + TimeOut: 3600 + MovementDistance: 50 + MovementSpeed: 5 + DefTeamAI: 3 + AtcTeamAI: 3 + NumOfDefenders: 1 + NumOfAttackers: 1 + HasHumanDefender: 0 + HasHumanAttacker: 0 + HealthPickupAmount: 50 + ArmourPickupAmount: 50 + AmmunitionPickupAmount: 60 + PickupsAmount: 0 + MaxHealth: 100 + MaxArmour: 100 + MaxAmmo: 360 + ViewDistance: 100 + GetHitChanceInDirectPoint: 50 + GetHitChanceInRunning: 25 + GetHitChanceInCover: 20 + DoDamageChanceInDirectPoint: 70 + DoDamageChanceInRunning: 30 + DoDamageChanceInCover: 25 + CrouchingCoefficient: 1.4 diff --git a/Docs/api.md b/Docs/api.md new file mode 100644 index 0000000..e69de29 diff --git a/Packages/manifest.json b/Packages/manifest.json index a5b072e..e756f82 100755 --- a/Packages/manifest.json +++ b/Packages/manifest.json @@ -1,17 +1,24 @@ { "dependencies": { + "com.unity.2d.sprite": "1.0.0", + "com.unity.2d.tilemap": "1.0.0", + "com.unity.ads": "3.7.5", + "com.unity.analytics": "3.6.12", "com.unity.collab-proxy": "1.14.12", "com.unity.ide.rider": "1.2.1", "com.unity.ide.visualstudio": "2.0.14", "com.unity.ide.vscode": "1.2.4", "com.unity.inputsystem": "1.3.0", "com.unity.ml-agents": "2.0.1", + "com.unity.multiplayer-hlapi": "1.0.8", "com.unity.probuilder": "4.5.2", + "com.unity.purchasing": "4.1.2", "com.unity.test-framework": "1.1.30", "com.unity.textmeshpro": "2.1.6", "com.unity.timeline": "1.2.18", "com.unity.toolchain.win-x86_64-linux-x86_64": "2.0.0", "com.unity.ugui": "1.0.0", + "com.unity.xr.legacyinputhelpers": "2.1.8", "com.unity.modules.ai": "1.0.0", "com.unity.modules.androidjni": "1.0.0", "com.unity.modules.animation": "1.0.0", diff --git a/Packages/packages-lock.json b/Packages/packages-lock.json index bb10cfc..0bac4b2 100755 --- a/Packages/packages-lock.json +++ b/Packages/packages-lock.json @@ -1,5 +1,35 @@ { "dependencies": { + "com.unity.2d.sprite": { + "version": "1.0.0", + "depth": 0, + "source": "builtin", + "dependencies": {} + }, + "com.unity.2d.tilemap": { + "version": "1.0.0", + "depth": 0, + "source": "builtin", + "dependencies": {} + }, + "com.unity.ads": { + "version": "3.7.5", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.ugui": "1.0.0" + }, + "url": "https://packages.unity.com" + }, + 
"com.unity.analytics": { + "version": "3.6.12", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.ugui": "1.0.0" + }, + "url": "https://packages.unity.com" + }, "com.unity.barracuda": { "version": "2.0.0", "depth": 1, @@ -86,6 +116,15 @@ }, "url": "https://packages.unity.com" }, + "com.unity.multiplayer-hlapi": { + "version": "1.0.8", + "depth": 0, + "source": "registry", + "dependencies": { + "nuget.mono-cecil": "0.1.6-preview" + }, + "url": "https://packages.unity.com" + }, "com.unity.probuilder": { "version": "4.5.2", "depth": 0, @@ -95,6 +134,29 @@ }, "url": "https://packages.unity.com" }, + "com.unity.purchasing": { + "version": "4.1.2", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.ugui": "1.0.0", + "com.unity.modules.unityanalytics": "1.0.0", + "com.unity.modules.unitywebrequest": "1.0.0", + "com.unity.modules.jsonserialize": "1.0.0", + "com.unity.modules.androidjni": "1.0.0", + "com.unity.services.core": "1.0.1" + }, + "url": "https://packages.unity.com" + }, + "com.unity.services.core": { + "version": "1.0.1", + "depth": 1, + "source": "registry", + "dependencies": { + "com.unity.modules.unitywebrequest": "1.0.0" + }, + "url": "https://packages.unity.com" + }, "com.unity.settings-manager": { "version": "1.0.3", "depth": 1, @@ -169,6 +231,23 @@ "com.unity.modules.imgui": "1.0.0" } }, + "com.unity.xr.legacyinputhelpers": { + "version": "2.1.8", + "depth": 0, + "source": "registry", + "dependencies": { + "com.unity.modules.vr": "1.0.0", + "com.unity.modules.xr": "1.0.0" + }, + "url": "https://packages.unity.com" + }, + "nuget.mono-cecil": { + "version": "0.1.6-preview", + "depth": 1, + "source": "registry", + "dependencies": {}, + "url": "https://packages.unity.com" + }, "com.unity.modules.ai": { "version": "1.0.0", "depth": 0, diff --git a/ProjectSettings/ProjectSettings.asset b/ProjectSettings/ProjectSettings.asset index db6a68e..b6bb648 100644 --- a/ProjectSettings/ProjectSettings.asset +++ b/ProjectSettings/ProjectSettings.asset @@ -631,7 +631,8 @@ PlayerSettings: gcIncremental: 0 assemblyVersionValidation: 1 gcWBarrierValidation: 0 - apiCompatibilityLevelPerPlatform: {} + apiCompatibilityLevelPerPlatform: + Standalone: 3 m_RenderingPath: 1 m_MobileRenderingPath: 1 metroPackageName: Template_3D diff --git a/ProjectSettings/ProjectVersion.txt b/ProjectSettings/ProjectVersion.txt old mode 100755 new mode 100644 diff --git a/StyleCop.Cache b/StyleCop.Cache new file mode 100644 index 0000000..edcafe4 --- /dev/null +++ b/StyleCop.Cache @@ -0,0 +1,7477 @@ + + 12 + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 01:08:14.230 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + Using directives must be sorted alphabetically by the namespaces. + 2 + False + + + The class must have a documentation header. + 5 + False + + + The field must have a documentation header. + 7 + False + + + The property must have a documentation header. + 8 + False + + + Adjacent elements must be separated by a blank line. + 8 + False + + + The property must not be placed on a single line. The opening and closing curly brackets must each be placed on their own line. + 8 + False + + + All properties must be placed after all fields. 
StyleCop analysis results follow for several C# scripts; each record lists the rule messages and the source lines they flag (all records analyzed 2022.04.28 18:27:01).

Record for the CharacterFactory script:
- Element ordering: all properties must be placed after all fields (lines 10-13, 15-16).
- Documentation headers are missing on the fields at lines 10-13 and 15-16 and on the methods at lines 18, 26, 52 and 77.
- Naming: variable and private field names must start with a lower-case letter (AIPrefab, line 12; PlayerPrefab, line 13; Bots, line 15; Player, line 16).
- Braces and spacing: the bodies of the if statements at lines 21, 34, 43, 46, 60, 84 and 93, the else statements at lines 23, 62, 86 and 95, and the for statements at lines 37 and 40 must be wrapped in opening and closing curly brackets; the block at line 87 must be followed by a blank line.
- Parameter layout: the method parameters at lines 37, 40, 43 and 46 must either share one line or each be placed on its own line, with the first parameter beginning on the line beneath the method name.
- Member access: calls to instance (lines 8, 20-21), Destroy and gameObject (line 23), InstanciateEntity (lines 37, 40, 43, 46), spawnPointsForAttackersTeam (lines 38, 44, 84, 93), spawnPointsForDefendersTeam (lines 41, 47, 86, 95), ResetCharacters (line 49), PlayerPrefab and AIPrefab (line 55), Player (lines 67, 93, 95), Bots (lines 73, 79) and TryGetComponent<Player> (line 89) must begin with the 'this.', 'base.', 'object.', 'CharacterFactory.' or 'MonoBehaviour.' prefix to indicate the intended call.

Records for two enum-only files (last written 2022.04.12 15:21:15):
- Each file has no header, an invalid header XML, or a header not located at the top of the file (line 1); each enum lacks a documentation header (line 1), as do its sub-items (lines 3-4 in the first file, lines 3-7 in the second).
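The same few rules account for most of the CharacterFactory findings. As a hedged illustration only (the class, field and method names below are hypothetical and do not reproduce the project's actual CharacterFactory code), the brace, naming and member-prefix rules would be satisfied along these lines:

```csharp
using UnityEngine;

/// <summary>Spawns player and AI characters (illustrative sketch only).</summary>
public class CharacterFactory : MonoBehaviour
{
    /// <summary>Prefab used for AI-controlled characters.</summary>
    [SerializeField]
    private GameObject aiPrefab;      // lower-case private field name, with a documentation header

    /// <summary>Prefab used for the human player.</summary>
    [SerializeField]
    private GameObject playerPrefab;

    /// <summary>Creates either a player or an AI character at the given position.</summary>
    private GameObject InstantiateEntity(bool isPlayer, Vector3 position)
    {
        // Member access goes through 'this.' so the intended call is explicit.
        GameObject prefab = isPlayer ? this.playerPrefab : this.aiPrefab;

        if (prefab != null)
        {
            // if/else bodies wrapped in curly brackets, as the rule demands.
            return Object.Instantiate(prefab, position, Quaternion.identity);
        }
        else
        {
            return null;
        }
    }
}
```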
Record for a C# file pairing a small class (exposing a Condition member) with an interface (last written 2022.04.18 10:42:05):
- The file has no valid header (line 1); the class (line 2), its property (line 4), fields (lines 5-6), constructor (line 8), the interface (line 15) and its property (line 17) all lack documentation headers.
- Element ordering: all classes must be placed after all interfaces (line 15); the property at line 4 must come after all fields and all constructors; the adjacent elements at lines 2 and 5 must be separated by a blank line.
- The fields at lines 5-6 must be declared with private access and exposed through properties.
- The call to Condition at line 11 must begin with the 'this.' prefix to indicate that the item is a member of the class.

Record for a character condition class tracking health, armour and ammunition (last written 2022.04.24 01:00:00):
- The file has no valid header (line 1) and contains multiple blank lines in a row (line 4); the class (line 5), events (lines 7-9), fields (lines 11, 35, 48), properties (lines 12, 36, 49), the constructor (line 62) and the methods at lines 25 and 70-72 lack documentation headers.
- Element ordering: events must be placed after all fields (lines 11, 35, 48) and after all constructors (line 62); methods must be placed after all properties (lines 36, 49); adjacent elements at lines 8-9, 12, 18, 35-36, 42, 48-49, 55 and 71-72 need separating blank lines; the blocks at lines 17, 34, 41, 47 and 54 must be followed by a blank line.
- Braces: the if/else bodies at lines 28, 30 and 32-33 must be wrapped in curly brackets.
- Member access: calls to health (lines 16, 20, 27, 29, 31, 66), armour (lines 40, 44, 67), ammo (lines 53, 57, 65), OnChangeHealthEvent (line 21), OnChangeArmourEvent (line 45), OnChangeAmmunitionEvent (line 58), HealthPoints (line 70), ArmourPoints (line 71) and Ammunition (line 72) must begin with the 'this.' prefix.

Record for MovementController (last written 2022.04.28 18:24:37):
- The file has no valid header (line 1); using directives must be sorted alphabetically (line 1) with System directives placed first (line 5); the file contains multiple blank lines in a row (line 17).
- Documentation headers are missing on the class (line 8), the properties at lines 10-12, the fields at lines 13-15 and the methods at lines 18, 24, 29, 34, 40 and 47.
- Element ordering: properties must be placed after all fields (lines 13-15), private methods after all public methods (lines 34, 40, 47), and adjacent elements at lines 11-13 need separating blank lines.
- Naming: the constant updateFlagPositionDelay (line 14) must start with an upper-case letter, and the method names getPointsCandidate (line 40) and goToNextNavPoint (line 47) must begin with an upper-case letter.
- Member access: calls to navMeshAgent (lines 20, 48), InvokeRepeating and UpdateFlagPosition (lines 21, 26), CancelInvoke (line 26), FlagDistance and flag (line 31) and goToNextNavPoint (line 37) must begin with the 'this.', 'base.', 'object.', 'MovementController.' or 'MonoBehaviour.' prefix.
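For the condition class, the flagged pattern is a backing field, a change event and a property that raises it. A minimal hypothetical sketch (class, member and event names are illustrative assumptions, not the project's file) of the compliant shape, with documentation headers, SA-style element ordering and 'this.'-qualified member access:

```csharp
using System;

/// <summary>Tracks a character's vital statistics (hypothetical sketch, not the project file).</summary>
public class CharacterCondition
{
    /// <summary>Backing field for <see cref="HealthPoints"/>.</summary>
    private int health;

    /// <summary>Raised whenever the health value changes.</summary>
    public event Action<int> OnChangeHealthEvent;

    /// <summary>Gets or sets the current health, notifying listeners on change.</summary>
    public int HealthPoints
    {
        get
        {
            return this.health;                      // member access via 'this.'
        }

        set
        {
            this.health = value;
            this.OnChangeHealthEvent?.Invoke(value); // event raised through 'this.'
        }
    }
}
```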
Record for the NPC_BaseState file (last written 2022.04.18 09:03:31):
- The file has no valid header (line 1); the enum (line 1) and its sub-items (lines 3-5), the interface (line 8), the classes at lines 18, 28 and 38 and their properties (lines 10-15, 20-25, 30-35, 40-45) all lack documentation headers, and most of these adjacent elements need separating blank lines.
- Naming: interface names must start with the capital letter 'I' (NPC_BaseState, line 8).
- A C# document may only contain a single class at the root level unless all of the classes are partial and of the same type (line 28).
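A hedged sketch of the naming fix (the identifiers below are hypothetical renames, not taken from the project): the interface gains the leading 'I' and documentation headers, and each concrete state class would then move to its own file to satisfy the single-root-class rule.

```csharp
/// <summary>High-level states an NPC can be in (illustrative sketch).</summary>
public enum NpcStateKind
{
    /// <summary>The NPC patrols its nav points.</summary>
    Patrol,

    /// <summary>The NPC attacks a target.</summary>
    Attack,
}

/// <summary>Contract for NPC state implementations; note the leading 'I'.</summary>
public interface INpcBaseState
{
    /// <summary>Gets the kind of state this object represents.</summary>
    NpcStateKind Kind { get; }
}
```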
Record for Player (last written 2022.04.18 10:43:46):
- The file has no valid header (line 1); the class (line 4), fields (lines 7-8), property (line 10), event (line 23) and the methods at lines 12, 18, 24 and 34 lack documentation headers.
- Element ordering: the fields at lines 7-8 must be declared private and exposed through properties; properties must be placed after all events (line 23); private methods must follow public methods (lines 18, 24); the adjacent elements at line 24 need a separating blank line.
- Braces and precedence: the if body at line 31 must be wrapped in curly brackets, and the arithmetic expression at line 27 needs parentheses to declare the operator precedence.
- Member access: calls to PlayerCharacter (lines 10, 14-15, 26), Condition (lines 15, 20, 27-28, 30) and OnKilledEvent (line 31) must begin with the 'this.', 'base.', 'object.', 'Player.' or 'MonoBehaviour.' prefix.
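The precedence and brace findings are easiest to see together. A small hypothetical sketch (the damage formula, member names and event are illustrative assumptions, not the project's Player code):

```csharp
using UnityEngine;

/// <summary>Applies incoming damage to the player (hypothetical sketch).</summary>
public class Player : MonoBehaviour
{
    /// <summary>Current hit points.</summary>
    private int health = 100;

    /// <summary>Raised when health reaches zero.</summary>
    public event System.Action OnKilledEvent;

    /// <summary>Subtracts damage, scaled by armour, and raises the kill event if needed.</summary>
    public void TakeDamage(int damage, int armour)
    {
        // Parentheses declare the intended operator precedence explicitly.
        this.health -= damage - (armour / 2);

        if (this.health <= 0)
        {
            // if body wrapped in curly brackets; event raised through 'this.'.
            this.OnKilledEvent?.Invoke();
        }
    }
}
```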
Record for scr_CharacterController (last written 2022.04.12 15:21:15), by far the largest record:
- The file has no valid header (line 1); the class name scr_CharacterController must begin with an upper-case letter (line 9); the opening curly bracket at line 10 must not be followed by a blank line and the closing brackets at lines 84 and 259 must not be preceded by one.
- Nearly every field (lines 12-56) lacks a documentation header; private fields must be placed after all public fields; the public fields input_View, cameraHolder, feetTransform, playerSettings, playerMask, gravityAmount, gravityMin, jumpingForce, playerStance, playerStanceSmoothing, playerStandStance, playerCrouchStance, playerProneStance and currentWeapon must be declared private and exposed through properties, must start with an upper-case letter while they remain public, and must not contain underscores (lines 14, 16).
- The methods at lines 57, 86, 94 and 105 lack documentation headers; the variable MovementSpeed (line 147) must start with a lower-case letter; the parameters spanning lines 142-145 must either share one line or each be placed on a separate line, with the first parameter beginning beneath the method name.
- Member access in Awake, Update and CalculateView (lines 59-102): calls to defaultInput, input_Movement, input_View, Jump, Crouch, Prone, ToggleSprint, StopSprint, newCameraRotation, newCharacterRotation, cameraHolder, characterController, GetComponent<CharacterController>, cameraHeight, currentWeapon, CalculateView, CalculateMovement, CalculateJump, CalculateCameraHeight, playerSettings, ViewClampYMin and ViewClampYMax must begin with the 'this.', 'base.', 'object.', 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended call.
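A hedged sketch of the field-level fixes (the class rename and member names below are illustrative assumptions, not the project's controller). Converting a public inspector field to a private serialized field keeps Unity serialization working via [SerializeField] while satisfying the access and naming rules:

```csharp
using UnityEngine;

/// <summary>First-person controller sketch, renamed from scr_CharacterController for PascalCase.</summary>
public class CharacterMotor : MonoBehaviour
{
    /// <summary>Transform that holds the camera; still serialized, but no longer a public field.</summary>
    [SerializeField]
    private Transform cameraHolder;

    /// <summary>Look input collected this frame; no underscore, lower-case first letter.</summary>
    private Vector2 inputView;

    /// <summary>Gets the camera holder so other scripts read it without touching the field.</summary>
    public Transform CameraHolder
    {
        get { return this.cameraHolder; }
    }
}
```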
- Member access in the movement, jump, stance and camera-height calculations (lines 107-229): calls to input_Movement, isSprinting, playerSettings, characterController, newMovementSpeed, newMovementSpeedVelocity, playerGravity, gravityAmount, gravityMin, jumpingForce, jumpingForceVelocity, playerStance, playerStandStance, playerCrouchStance, playerProneStance, StanceCheck, cameraHeight, cameraHolder, cameraHeightVelocity and playerStanceSmoothing must likewise begin with the 'this.', 'base.', 'object.', 'scr_CharacterController.' or 'MonoBehaviour.' prefix.
- The spacing around the keyword 'if' is invalid at lines 126 and 130; the blocks at lines 186, 199, 216, 219 and 223 must be followed by a blank line; the adjacent elements at line 187 need a separating blank line; the methods at lines 165, 170, 187, 209, 227 and later lack documentation headers.
+ 232 + False + + + The code must not contain multiple blank lines in a row. + 237 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7469 + 7481 + 234 + 33 + 234 + 45 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7495 + 7507 + 234 + 59 + 234 + 71 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7522 + 7540 + 234 + 86 + 234 + 104 + False + + + The call to stanceCheckErrorMargin must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7551 + 7572 + 234 + 115 + 234 + 136 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 234 + 7575 + 7587 + 234 + 139 + 234 + 151 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7633 + 7645 + 235 + 31 + 235 + 43 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7659 + 7671 + 235 + 57 + 235 + 69 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7686 + 7704 + 235 + 84 + 235 + 102 + False + + + The call to stanceCheckErrorMargin must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7715 + 7736 + 235 + 113 + 235 + 134 + False + + + The call to feetTransform must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 235 + 7759 + 7771 + 235 + 157 + 235 + 169 + False + + + The call to characterController must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 238 + 7855 + 7873 + 238 + 49 + 238 + 67 + False + + + The call to playerMask must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 238 + 7883 + 7892 + 238 + 77 + 238 + 86 + False + + + The method must have a documentation header. + 241 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 247 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 245 + 8008 + 8018 + 245 + 13 + 245 + 23 + False + + + The call to input_Movement must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 243 + 7958 + 7971 + 243 + 13 + 243 + 26 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' 
or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 248 + 8070 + 8080 + 248 + 9 + 248 + 19 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 248 + 8085 + 8095 + 248 + 24 + 248 + 34 + False + + + The method must have a documentation header. + 251 + False + + + The call to isSprinting must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 255 + 8216 + 8226 + 255 + 13 + 255 + 23 + False + + + The call to playerSettings must begin with the 'this.', 'base.', 'object.' or 'scr_CharacterController.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 253 + 8162 + 8175 + 253 + 13 + 253 + 26 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.031 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 4 + False + + + class names begin with an upper-case letter: scr_Models. + 4 + False + + + The enum must have a documentation header. + 8 + False + + + The enumeration sub-item must have a documentation header. + 10 + False + + + The enumeration sub-item must have a documentation header. + 11 + False + + + The enumeration sub-item must have a documentation header. + 12 + False + + + The class must have a documentation header. + 16 + False + + + The field must have a documentation header. + 19 + False + + + Fields must be declared with private access. Use properties to expose fields. + 19 + False + + + The field must have a documentation header. + 20 + False + + + Fields must be declared with private access. Use properties to expose fields. + 20 + False + + + The field must have a documentation header. + 22 + False + + + Fields must be declared with private access. Use properties to expose fields. + 22 + False + + + The field must have a documentation header. + 23 + False + + + Fields must be declared with private access. Use properties to expose fields. + 23 + False + + + The field must have a documentation header. + 26 + False + + + Fields must be declared with private access. Use properties to expose fields. + 26 + False + + + The field must have a documentation header. + 27 + False + + + Fields must be declared with private access. Use properties to expose fields. + 27 + False + + + The field must have a documentation header. + 30 + False + + + Fields must be declared with private access. Use properties to expose fields. + 30 + False + + + The field must have a documentation header. + 31 + False + + + Fields must be declared with private access. Use properties to expose fields. + 31 + False + + + The field must have a documentation header. + 34 + False + + + Fields must be declared with private access. Use properties to expose fields. + 34 + False + + + The field must have a documentation header. + 35 + False + + + Fields must be declared with private access. Use properties to expose fields. + 35 + False + + + The field must have a documentation header. + 36 + False + + + Fields must be declared with private access. 
Use properties to expose fields. + 36 + False + + + The field must have a documentation header. + 39 + False + + + Fields must be declared with private access. Use properties to expose fields. + 39 + False + + + The field must have a documentation header. + 40 + False + + + Fields must be declared with private access. Use properties to expose fields. + 40 + False + + + The field must have a documentation header. + 41 + False + + + Fields must be declared with private access. Use properties to expose fields. + 41 + False + + + The field must have a documentation header. + 44 + False + + + Fields must be declared with private access. Use properties to expose fields. + 44 + False + + + The field must have a documentation header. + 45 + False + + + Fields must be declared with private access. Use properties to expose fields. + 45 + False + + + The field must have a documentation header. + 46 + False + + + Fields must be declared with private access. Use properties to expose fields. + 46 + False + + + The field must have a documentation header. + 47 + False + + + Fields must be declared with private access. Use properties to expose fields. + 47 + False + + + The class must have a documentation header. + 51 + False + + + The field must have a documentation header. + 53 + False + + + Fields must be declared with private access. Use properties to expose fields. + 53 + False + + + The field must have a documentation header. + 54 + False + + + Fields must be declared with private access. Use properties to expose fields. + 54 + False + + + The class must have a documentation header. + 62 + False + + + The field must have a documentation header. + 65 + False + + + Fields must be declared with private access. Use properties to expose fields. + 65 + False + + + The field must have a documentation header. + 66 + False + + + Fields must be declared with private access. Use properties to expose fields. + 66 + False + + + The field must have a documentation header. + 67 + False + + + Fields must be declared with private access. Use properties to expose fields. + 67 + False + + + The field must have a documentation header. + 68 + False + + + Fields must be declared with private access. Use properties to expose fields. + 68 + False + + + The field must have a documentation header. + 69 + False + + + Fields must be declared with private access. Use properties to expose fields. + 69 + False + + + The field must have a documentation header. + 70 + False + + + Fields must be declared with private access. Use properties to expose fields. + 70 + False + + + The field must have a documentation header. + 71 + False + + + Fields must be declared with private access. Use properties to expose fields. + 71 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 01:02:47.068 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + System using directives must be placed before all other using directives. + 3 + False + + + The class must have a documentation header. + 5 + False + + + The field must have a documentation header. + 7 + False + + + The property must have a documentation header. + 8 + False + + + Adjacent elements must be separated by a blank line. 
+ 8 + False + + + The property must not be placed on a single line. The opening and closing curly brackets must each be placed on their own line. + 8 + False + + + All properties must be placed after all fields. + 10 + False + + + All properties must be placed after all fields. + 11 + False + + + All properties must be placed after all events. + 98 + False + + + The field must have a documentation header. + 10 + False + + + Variable names and private field names must start with a lower-case letter: DefendersTeam. + 10 + False + + + The field must have a documentation header. + 11 + False + + + Variable names and private field names must start with a lower-case letter: AttackersTeam. + 11 + False + + + The method must have a documentation header. + 13 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 16 + False + + + The body of the else statement must be wrapped in opening and closing curly brackets. + 18 + False + + + All private methods must be placed after all public methods. + 39 + False + + + All private methods must be placed after all public methods. + 54 + False + + + All private methods must be placed after all public methods. + 68 + False + + + The call to Destroy must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 18 + 536 + 542 + 18 + 13 + 18 + 19 + False + + + The call to gameObject must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 18 + 544 + 553 + 18 + 21 + 18 + 30 + False + + + The method must have a documentation header. + 21 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 33 + False + + + The body of the else statement must be wrapped in opening and closing curly brackets. + 35 + False + + + The call to ResetScene must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 23 + 647 + 656 + 23 + 48 + 23 + 57 + False + + + The call to flagCaptured must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 25 + 706 + 717 + 25 + 45 + 25 + 56 + False + + + The call to timeOut must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 26 + 762 + 768 + 26 + 42 + 26 + 48 + False + + + The method must have a documentation header. + 39 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 43 + False + + + The body of the else statement must be wrapped in opening and closing curly brackets. + 45 + False + + + The body of the foreach statement must be wrapped in opening and closing curly brackets. + 49 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 50 + False + + + The method must have a documentation header. + 54 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 58 + False + + + The body of the else statement must be wrapped in opening and closing curly brackets. + 60 + False + + + The body of the foreach statement must be wrapped in opening and closing curly brackets. + 63 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 64 + False + + + The method must have a documentation header. 
+ 68 + False + + + The method must have a documentation header. + 71 + False + + + method names begin with an upper-case letter: flagCaptured. + 71 + False + + + The spacing around the keyword 'switch' is invalid. + 73 + 2522 + 2527 + 73 + 9 + 73 + 14 + False + + + The method must have a documentation header. + 87 + False + + + method names begin with an upper-case letter: timeOut. + 87 + False + + + The method must have a documentation header. + 92 + False + + + The call to flagCaptured must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 94 + 3031 + 3042 + 94 + 45 + 94 + 56 + False + + + The call to timeOut must begin with the 'this.', 'base.', 'object.' or 'GameManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 95 + 3087 + 3093 + 95 + 42 + 95 + 48 + False + + + The event must have a documentation header. + 98 + False + + + The method must have a documentation header. + 99 + False + + + Adjacent elements must be separated by a blank line. + 99 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.033 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 3 + False + + + The event must have a documentation header. + 5 + False + + + event names begin with an upper-case letter: onCaptureFlag. + 5 + False + + + The method must have a documentation header. + 7 + False + + + All methods must be placed after all events. + 13 + False + + + The event must have a documentation header. + 13 + False + + + event names begin with an upper-case letter: onTimeLeft. + 13 + False + + + The method must have a documentation header. + 14 + False + + + Adjacent elements must be separated by a blank line. + 14 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.16 19:49:10.120 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 4 + False + + + The property must have a documentation header. + 6 + False + + + property names begin with an upper-case letter: navPoints. + 6 + False + + + The method must have a documentation header. + 7 + False + + + Adjacent elements must be separated by a blank line. + 7 + False + + + A line may only contain a single statement. + 15 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.16 00:20:04.403 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. 
+ 5 + False + + + The field must have a documentation header. + 7 + False + + + The property must have a documentation header. + 8 + False + + + Adjacent elements must be separated by a blank line. + 8 + False + + + The property must not be placed on a single line. The opening and closing curly brackets must each be placed on their own line. + 8 + False + + + The property must have a documentation header. + 10 + False + + + The method must have a documentation header. + 11 + False + + + Adjacent elements must be separated by a blank line. + 11 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 23 + False + + + The method must have an access modifier. + 11 + False + + + The call to Destroy must begin with the 'this.', 'base.', 'object.' or 'TimeManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 21 + 529 + 535 + 21 + 13 + 21 + 19 + False + + + The call to gameObject must begin with the 'this.', 'base.', 'object.' or 'TimeManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 21 + 537 + 546 + 21 + 21 + 21 + 30 + False + + + The method must have a documentation header. + 24 + False + + + Adjacent elements must be separated by a blank line. + 24 + False + + + The method must have an access modifier. + 24 + False + + + The call to CurrentTime must begin with the 'this.', 'base.', 'object.' or 'TimeManager.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 26 + 603 + 613 + 26 + 9 + 26 + 19 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.16 20:17:09.487 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 5 + False + + + A single-line comment must be preceded by a blank line or another single-line comment, or must be the first item in its scope. To ignore this error when commenting out a line of code, begin the comment with '////' rather than '//'. + 14 + False + + + A single-line comment must not be followed by a blank line. To ignore this error when commenting out a line of code, begin the comment with '////' rather than '//'. + 14 + False + + + The comment must start with a single space. To ignore this error when commenting out a line of code, begin the comment with '////' rather than '//'. + 14 + 391 + 414 + 14 + 5 + 14 + 28 + False + + + The property must have a documentation header. + 7 + False + + + property names begin with an upper-case letter: position. + 7 + False + + + All properties must be placed after all fields. + 11 + False + + + All properties must be placed after all fields. + 12 + False + + + All properties must be placed after all fields. + 13 + False + + + The property must have a documentation header. + 8 + False + + + Adjacent elements must be separated by a blank line. + 8 + False + + + The field must have a documentation header. + 11 + False + + + Fields must be declared with private access. Use properties to expose fields. + 11 + False + + + The field must have a documentation header. + 12 + False + + + Fields must be declared with private access. Use properties to expose fields. + 12 + False + + + The field must have a documentation header. 
+ 13 + False + + + Fields must be declared with private access. Use properties to expose fields. + 13 + False + + + The method must have a documentation header. + 16 + False + + + The call to FlagDistance must begin with the 'this.', 'base.', 'object.' or 'NavPoint.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 18 + 460 + 471 + 18 + 9 + 18 + 20 + False + + + The call to position must begin with the 'this.', 'base.', 'object.' or 'NavPoint.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 18 + 538 + 545 + 18 + 87 + 18 + 94 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 01:00:02.216 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 4 + False + + + The spacing around the symbol '=' is invalid. + 3 + False + + + The field must have a documentation header. + 6 + False + + + Fields must be declared with private access. Use properties to expose fields. + 6 + False + + + The field must have a documentation header. + 8 + False + + + Fields must be declared with private access. Use properties to expose fields. + 8 + False + + + The field must have a documentation header. + 9 + False + + + Fields must be declared with private access. Use properties to expose fields. + 9 + False + + + The field must have a documentation header. + 12 + False + + + Fields must be declared with private access. Use properties to expose fields. + 12 + False + + + The field must have a documentation header. + 13 + False + + + Fields must be declared with private access. Use properties to expose fields. + 13 + False + + + The field must have a documentation header. + 15 + False + + + Fields must be declared with private access. Use properties to expose fields. + 15 + False + + + The field must have a documentation header. + 16 + False + + + Fields must be declared with private access. Use properties to expose fields. + 16 + False + + + The field must have a documentation header. + 17 + False + + + Fields must be declared with private access. Use properties to expose fields. + 17 + False + + + The field must have a documentation header. + 18 + False + + + Fields must be declared with private access. Use properties to expose fields. + 18 + False + + + The field must have a documentation header. + 19 + False + + + Fields must be declared with private access. Use properties to expose fields. + 19 + False + + + The field must have a documentation header. + 20 + False + + + Fields must be declared with private access. Use properties to expose fields. + 20 + False + + + The field must have a documentation header. + 22 + False + + + Fields must be declared with private access. Use properties to expose fields. + 22 + False + + + The field must have a documentation header. + 23 + False + + + Fields must be declared with private access. Use properties to expose fields. + 23 + False + + + The field must have a documentation header. + 24 + False + + + Fields must be declared with private access. Use properties to expose fields. + 24 + False + + + The field must have a documentation header. + 25 + False + + + Fields must be declared with private access. Use properties to expose fields. 
+ 25 + False + + + The field must have a documentation header. + 27 + False + + + Fields must be declared with private access. Use properties to expose fields. + 27 + False + + + The field must have a documentation header. + 28 + False + + + Fields must be declared with private access. Use properties to expose fields. + 28 + False + + + The field must have a documentation header. + 29 + False + + + Fields must be declared with private access. Use properties to expose fields. + 29 + False + + + The field must have a documentation header. + 31 + False + + + Fields must be declared with private access. Use properties to expose fields. + 31 + False + + + The field must have a documentation header. + 33 + False + + + Fields must be declared with private access. Use properties to expose fields. + 33 + False + + + The field must have a documentation header. + 34 + False + + + Fields must be declared with private access. Use properties to expose fields. + 34 + False + + + The field must have a documentation header. + 35 + False + + + Fields must be declared with private access. Use properties to expose fields. + 35 + False + + + The field must have a documentation header. + 36 + False + + + Fields must be declared with private access. Use properties to expose fields. + 36 + False + + + The field must have a documentation header. + 37 + False + + + Fields must be declared with private access. Use properties to expose fields. + 37 + False + + + The field must have a documentation header. + 38 + False + + + Fields must be declared with private access. Use properties to expose fields. + 38 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 23:29:50.464 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 5 + False + + + The field must have a documentation header. + 7 + False + + + The property must have a documentation header. + 8 + False + + + Adjacent elements must be separated by a blank line. + 8 + False + + + The property must not be placed on a single line. The opening and closing curly brackets must each be placed on their own line. + 8 + False + + + All properties must be placed after all fields. + 15 + False + + + The method must have a documentation header. + 10 + False + + + All methods must be placed after all properties. + 16 + False + + + The field must have a documentation header. + 15 + False + + + The property must have a documentation header. + 16 + False + + + Adjacent elements must be separated by a blank line. + 16 + False + + + The property must not be placed on a single line. The opening and closing curly brackets must each be placed on their own line. + 16 + False + + + The call to gameSettings must begin with the 'this.', 'base.', 'object.' or 'SettingsReader.' or 'MonoBehaviour.' prefix to indicate the intended method call. 
+ 16 + 419 + 430 + 16 + 48 + 16 + 59 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.040 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 3 + False + + + The method must have a documentation header. + 5 + False + + + An opening curly bracket must not be followed by a blank line. + 6 + False + + + A closing curly bracket must not be preceded by a blank line. + 8 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 00:59:43.057 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 5 + False + + + The property must have a documentation header. + 7 + False + + + property names begin with an upper-case letter: type. + 7 + False + + + The method must have a documentation header. + 9 + False + + + The call to PickObject must begin with the 'this.', 'base.', 'object.' or 'AmmoPickUp.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 11 + 252 + 261 + 11 + 9 + 11 + 18 + False + + + The method must have a documentation header. + 14 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 00:59:39.476 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 5 + False + + + The property must have a documentation header. + 7 + False + + + property names begin with an upper-case letter: type. + 7 + False + + + The method must have a documentation header. + 9 + False + + + The call to PickObject must begin with the 'this.', 'base.', 'object.' or 'ArmourPickUp.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 11 + 250 + 259 + 11 + 9 + 11 + 18 + False + + + The method must have a documentation header. + 14 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 00:59:35.943 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 5 + False + + + The property must have a documentation header. + 7 + False + + + property names begin with an upper-case letter: type. + 7 + False + + + The method must have a documentation header. 
+ 9 + False + + + The call to PickObject must begin with the 'this.', 'base.', 'object.' or 'HealthPickUp.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 11 + 250 + 259 + 11 + 9 + 11 + 18 + False + + + The method must have a documentation header. + 14 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.043 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The interface must have a documentation header. + 3 + False + + + Adjacent elements must be separated by a blank line. + 3 + False + + + The property must have a documentation header. + 5 + False + + + property names begin with an upper-case letter: type. + 5 + False + + + The method must have a documentation header. + 6 + False + + + Adjacent elements must be separated by a blank line. + 6 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 00:59:47.515 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 7 + False + + + The field must have a documentation header. + 9 + False + + + The property must have a documentation header. + 10 + False + + + Adjacent elements must be separated by a blank line. + 10 + False + + + The property must not be placed on a single line. The opening and closing curly brackets must each be placed on their own line. + 10 + False + + + All properties must be placed after all fields. + 12 + False + + + All properties must be placed after all fields. + 13 + False + + + All properties must be placed after all fields. + 14 + False + + + All properties must be placed after all fields. + 15 + False + + + All properties must be placed after all fields. + 17 + False + + + The call to instance must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 10 + 255 + 262 + 10 + 50 + 10 + 57 + False + + + The field must have a documentation header. + 12 + False + + + The field must have a documentation header. + 13 + False + + + The field must have a documentation header. + 14 + False + + + The field must have a documentation header. + 15 + False + + + The field must have a documentation header. + 17 + False + + + The method must have a documentation header. + 19 + False + + + The body of the if statement must be wrapped in opening and closing curly brackets. + 22 + False + + + The body of the else statement must be wrapped in opening and closing curly brackets. + 24 + False + + + The call to instance must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 22 + 612 + 619 + 22 + 13 + 22 + 20 + False + + + The call to instance must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. 
+ 21 + 581 + 588 + 21 + 13 + 21 + 20 + False + + + The call to Destroy must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 24 + 656 + 662 + 24 + 13 + 24 + 19 + False + + + The call to gameObject must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 24 + 664 + 673 + 24 + 21 + 24 + 30 + False + + + The method must have a documentation header. + 27 + False + + + The body of the for statement must be wrapped in opening and closing curly brackets. + 32 + False + + + The body of the for statement must be wrapped in opening and closing curly brackets. + 34 + False + + + The body of the foreach statement must be wrapped in opening and closing curly brackets. + 40 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 38 + False + + + The call to pickups must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 29 + 728 + 734 + 29 + 9 + 29 + 15 + False + + + The call to pickups must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 32 + 890 + 896 + 32 + 13 + 32 + 19 + False + + + The call to healthPrefab must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 32 + 925 + 936 + 32 + 48 + 32 + 59 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 32 + 939 + 949 + 32 + 62 + 32 + 72 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 32 + 967 + 977 + 32 + 90 + 32 + 100 + False + + + The call to pickups must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 34 + 1085 + 1091 + 34 + 13 + 34 + 19 + False + + + The call to armourPrefab must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 34 + 1120 + 1131 + 34 + 48 + 34 + 59 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 34 + 1134 + 1144 + 34 + 62 + 34 + 72 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 34 + 1162 + 1172 + 34 + 90 + 34 + 100 + False + + + The call to pickups must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 37 + 1291 + 1297 + 37 + 13 + 37 + 19 + False + + + The call to ammoPrefab must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 37 + 1326 + 1335 + 37 + 48 + 37 + 57 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 37 + 1338 + 1348 + 37 + 60 + 37 + 70 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' 
prefix to indicate the intended method call. + 37 + 1366 + 1376 + 37 + 88 + 37 + 98 + False + + + The call to pickups must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 39 + 1473 + 1479 + 39 + 33 + 39 + 39 + False + + + The call to StartCoroutine must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 42 + 1531 + 1544 + 42 + 9 + 42 + 22 + False + + + The call to SpawnNewPickUps must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 42 + 1546 + 1560 + 42 + 24 + 42 + 38 + False + + + The method must have a documentation header. + 45 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 58 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 55 + 1898 + 1908 + 55 + 47 + 55 + 57 + False + + + The call to spawnPoints must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 55 + 1926 + 1936 + 55 + 75 + 55 + 85 + False + + + The call to IsDisableCheck must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 50 + 1703 + 1716 + 50 + 16 + 50 + 29 + False + + + The spacing around the keyword 'while' is invalid. + 47 + 1634 + 1638 + 47 + 9 + 47 + 13 + False + + + The spacing around the keyword 'if' is invalid. + 50 + 1700 + 1701 + 50 + 13 + 50 + 14 + False + + + The code contains multiple spaces in a row. Only one space is needed. + 59 + 2062 + 2063 + 59 + 29 + 59 + 30 + False + + + The method must have a documentation header. + 63 + False + + + Statements or elements wrapped in curly brackets must be followed by a blank line. + 72 + False + + + The call to pickups must begin with the 'this.', 'base.', 'object.' or 'PickUpSpawner.' or 'MonoBehaviour.' prefix to indicate the intended method call. + 65 + 2196 + 2202 + 65 + 29 + 65 + 35 + False + + + The spacing around the keyword 'foreach' is invalid. + 65 + 2176 + 2182 + 65 + 9 + 65 + 15 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.044 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The enum must have a documentation header. + 1 + False + + + The enumeration sub-item must have a documentation header. + 3 + False + + + The enumeration sub-item must have a documentation header. + 4 + False + + + The enumeration sub-item must have a documentation header. + 5 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 22:38:30.693 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. 
+ 1 + False + + + The code must not contain blank lines at the end of the file. + 4 + False + + + The code must not contain multiple blank lines in a row. + 4 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 15:21:15.046 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The enum must have a documentation header. + 1 + False + + + The enumeration sub-item must have a documentation header. + 3 + False + + + The enumeration sub-item must have a documentation header. + 4 + False + + + The enumeration sub-item must have a documentation header. + 5 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.24 00:44:55.312 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 1 + False + + + The method must have a documentation header. + 3 + False + + + Field names must not start with an underscore. + 3 + False + + + + + + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.710 + 2022.04.12 16:12:15.866 + 2022.04.28 18:27:01.731 + 2022.04.28 18:27:01.731 + -2051395988 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + 2022.04.28 18:27:01.731 + 0 + + + + The file has no header, the header Xml is invalid, or the header is not located at the top of the file. + 1 + False + + + The class must have a documentation header. + 11 + False + + + The field must have a documentation header. + 13 + False + + + The field must have a documentation header. + 14 + False + + + The field must have a documentation header. + 15 + False + + + The field must have a documentation header. + 16 + False + + + The field must have a documentation header. + 17 + False + + + The field must have a documentation header. + 18 + False + + + The field must have a documentation header. + 19 + False + + + The field must have a documentation header. + 20 + False + + + The field must have a documentation header. + 21 + False + + + The field must have a documentation header. + 23 + False + + + The property must have a documentation header. + 26 + False + + + The property must have a documentation header. + 31 + False + + + The indexer must have a documentation header. + 36 + False + + + The indexer must have a documentation header. + 47 + False + + + The constructor must have a documentation header. + 60 + False + + + The constructor must have a documentation header. + 65 + False + + + The constructor must have a documentation header. + 70 + False + + + The constructor must have a documentation header. + 75 + False + + + The constructor must have a documentation header. + 85 + False + + + The constructor must have a documentation header. + 90 + False + + + The method must have a documentation header. + 100 + False + + + The method must have a documentation header. 
+ 122 + False + + + The method must have a documentation header. + 127 + False + + + The method must have a documentation header. + 146 + False + + + The method must have a documentation header. + 151 + False + + + The method must have a documentation header. + 190 + False + + + The method must have a documentation header. + 195 + False + + + The method must have a documentation header. + 226 + False + + + The method must have a documentation header. + 281 + False + + + The method must have a documentation header. + 297 + False + + + The method must have a documentation header. + 314 + False + + + The class must have a documentation header. + 325 + False + + + The field must have a documentation header. + 327 + False + + + The method must have a documentation header. + 403 + False + + + The method must have a documentation header. + 420 + False + + + The method must have a documentation header. + 439 + False + + + The property must have a documentation header. + 450 + False + + + The property must have a documentation header. + 455 + False + + + The method must have a documentation header. + 460 + False + + + The method must have a documentation header. + 465 + False + + + The method must have a documentation header. + 472 + False + + + The property must have a documentation header. + 490 + False + + + The method must have a documentation header. + 495 + False + + + The method must have a documentation header. + 500 + False + + + The method must have a documentation header. + 505 + False + + + The method must have a documentation header. + 510 + False + + + + + UNITY_2019_4_35;UNITY_2019_4;UNITY_2019;UNITY_5_3_OR_NEWER;UNITY_5_4_OR_NEWER;UNITY_5_5_OR_NEWER;UNITY_5_6_OR_NEWER;UNITY_2017_1_OR_NEWER;UNITY_2017_2_OR_NEWER;UNITY_2017_3_OR_NEWER;UNITY_2017_4_OR_NEWER;UNITY_2018_1_OR_NEWER;UNITY_2018_2_OR_NEWER;UNITY_2018_3_OR_NEWER;UNITY_2018_4_OR_NEWER;UNITY_2019_1_OR_NEWER;UNITY_2019_2_OR_NEWER;UNITY_2019_3_OR_NEWER;UNITY_2019_4_OR_NEWER;PLATFORM_ARCH_64;UNITY_64;UNITY_INCLUDE_TESTS;ENABLE_AR;ENABLE_AUDIO;ENABLE_CACHING;ENABLE_CLOTH;ENABLE_EVENT_QUEUE;ENABLE_MICROPHONE;ENABLE_MULTIPLE_DISPLAYS;ENABLE_PHYSICS;ENABLE_TEXTURE_STREAMING;ENABLE_UNET;ENABLE_LZMA;ENABLE_UNITYEVENTS;ENABLE_VR;ENABLE_WEBCAM;ENABLE_UNITYWEBREQUEST;ENABLE_WWW;ENABLE_CLOUD_SERVICES;ENABLE_CLOUD_SERVICES_COLLAB;ENABLE_CLOUD_SERVICES_COLLAB_SOFTLOCKS;ENABLE_CLOUD_SERVICES_ADS;ENABLE_CLOUD_SERVICES_USE_WEBREQUEST;ENABLE_CLOUD_SERVICES_CRASH_REPORTING;ENABLE_CLOUD_SERVICES_PURCHASING;ENABLE_CLOUD_SERVICES_ANALYTICS;ENABLE_CLOUD_SERVICES_UNET;ENABLE_CLOUD_SERVICES_BUILD;ENABLE_CLOUD_LICENSE;ENABLE_EDITOR_HUB_LICENSE;ENABLE_WEBSOCKET_CLIENT;ENABLE_DIRECTOR_AUDIO;ENABLE_DIRECTOR_TEXTURE;ENABLE_MANAGED_JOBS;ENABLE_MANAGED_TRANSFORM_JOBS;ENABLE_MANAGED_ANIMATION_JOBS;ENABLE_MANAGED_AUDIO_JOBS;INCLUDE_DYNAMIC_GI;ENABLE_MONO_BDWGC;ENABLE_SCRIPTING_GC_WBARRIERS;PLATFORM_SUPPORTS_MONO;RENDER_SOFTWARE_CURSOR;ENABLE_VIDEO;PLATFORM_STANDALONE;PLATFORM_STANDALONE_WIN;UNITY_STANDALONE_WIN;UNITY_STANDALONE;ENABLE_RUNTIME_GI;ENABLE_MOVIES;ENABLE_NETWORK;ENABLE_CRUNCH_TEXTURE_COMPRESSION;ENABLE_OUT_OF_PROCESS_CRASH_HANDLER;ENABLE_CLUSTER_SYNC;ENABLE_CLUSTERINPUT;GFXDEVICE_WAITFOREVENT_MESSAGEPUMP;ENABLE_WEBSOCKET_HOST;ENABLE_MONO;NET_4_6;ENABLE_PROFILER;DEBUG;TRACE;UNITY_ASSERTIONS;UNITY_EDITOR;UNITY_EDITOR_64;UNITY_EDITOR_WIN;ENABLE_UNITY_COLLECTIONS_CHECKS;ENABLE_BURST_AOT;UNITY_TEAM_LICENSE;ENABLE_CUSTOM_RENDER_TEXTURE;ENABLE_DIRECTOR;ENABLE_LOCALIZATION;ENABLE_SPRITES;ENABLE_TERRAIN;ENABLE_TILEMAP;ENABLE_TIMELINE;ENABLE_INPUT_SYSTEM;ENABLE
[Flattened XML from Visual Studio / StyleCop source-analysis result files included in this diff; the markup was lost in extraction. What survives: compiler defines (_LEGACY_INPUT_MANAGER;CSHARP_7_OR_LATER;CSHARP_7_3_OR_NEWER), analysis timestamps from 2022.04.28, a reported syntax error in F:\SigmaRiskManagment\real shooter Git Version\Assets\Scripts\Character\NPC.cs at line 87, and StyleCop violations for the FlagZone script: missing file/class/member documentation headers, properties placed before fields, missing blank lines between adjacent elements, calls to occupDefenders, occupAttackers, TimeStayAttackers, TimeStayDefenders, timeForWin, IsOccupBoth and IsNotOccup lacking a 'this.' (or 'FlagZone.' / 'MonoBehaviour.') prefix, invalid spacing around '>' and the 'switch' keyword, and if-statement bodies not wrapped in curly brackets. A second analysed file (timestamped 2022.04.12) is flagged for a missing file header and a missing class documentation header.]
\ No newline at end of file diff --git a/test-ml-agents.ipynb b/test-ml-agents.ipynb new file mode 100644 index 0000000..1e2d31e --- /dev/null +++ b/test-ml-agents.ipynb @@ -0,0 +1,20623 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 34, + "id": "8104e2db-b1a8-40a0-a238-5d9149fd74b0", + "metadata": {}, + "outputs": [], + "source": [ + "from mlagents_envs.environment import UnityEnvironment\n", + "import mlagents_envs\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "id": "6f477382-acc9-4aec-907a-7f58caf955ed", + "metadata": {}, + "outputs": [], + "source": [ + "import random" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "id": "b7f60f26-0a90-4ea5-b2c2-b5683bda56a6", + "metadata": {}, + "outputs": [], + "source": [ + "env = UnityEnvironment()" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "id": "5929b410-12c3-4bd9-b984-b2c29a76c3f3", + "metadata": {}, + "outputs": [], + "source": [ + "env.reset()" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "id": "f108ff09-9f42-4405-add3-6df941c48f8b", + "metadata": { + "scrolled": true, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0., 0., 0., 3., 3., 3.,\n", + " 0., 0., 1., 1., 0., 0.,\n", + " 0., 78.21462], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", [The hunk header shows the added notebook is 20,623 lines, and nearly all of it is this stdout stream repeating the same four-line pattern: "Step", a "(, )" pair whose angle-bracketed object reprs were lost in extraction, a DecisionStep whose two observation arrays differ only in a few counter entries (values 0–2) and in the final vector element (78.21462, later 80.694435), and a reward of 0.0. The repeated blocks are omitted below.]
, 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 80.694435], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
+    "Step\n",
+    "(, )\n",
+    "DecisionStep(obs=[array([[0., 1., 0., 0., 0.], ... 10 rows x 5 one-hot columns ...], dtype=float32), array([ 0., 0., 0., 3., 3., 3., 0., 0., 2., 1., 0., 0., 0., 91.29597], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+    "0.0\n",
[Committed notebook cell output: the four-line "Step" block above repeats for several hundred environment steps in this added hunk. Every repetition has reward=0.0, agent_id=0, group_id=0, group_reward=0.0 and an all-False three-branch action mask; only a few entries of the vector observation (values 0-2) and its trailing distance-like component (~90.9-91.3) change between steps.]
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.28178], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26389], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
+    "[… output truncated: hundreds of near-identical entries repeating the pattern 'Step' / '(, )' / DecisionStep(obs=[<(10, 5) float32 grid>, <14-element float32 vector whose last value is 91.30669, later 83.26209>], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0) / '0.0' …]\n",
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 83.26209], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n",
+ "[... long run of near-identical per-step notebook output elided: each iteration prints 'Step', the (DecisionSteps, TerminalSteps) tuple, a single DecisionStep whose observations are a 10x5 grid array plus a 14-element vector (last entry drifting between 91.25988 and 91.30669), reward=0.0, agent_id=0, a three-branch all-False action_mask, group_id=0, and group_reward=0.0 ...]\n",
+ "0.0\n",
+ "Step\n",
+ "(, )\n",
+ "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + "       [0., 1., 1., 0., 0.],\n", + "       [0., 1., 1., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + "        1. , 1. , 2. , 1. , 0. , 0. ,\n", + "        0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26341], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30755], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n",
+ "[... repeated cell output elided: many further Step / DecisionStep printouts for agent_id=0, each with reward 0.0, a three-branch all-False action_mask, group_id=0, group_reward=0.0, a 10x5 grid observation, and a 14-element state vector whose last entry varies between about 85.95 and 91.31 ...]\n",
+ "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.25333], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "[... a long run of near-identical per-step printouts elided: each iteration prints Step, the step tuple rendered as (, ), a DecisionStep whose obs hold a 10x5 grid observation plus a 14-element vector observation (the last entry drops from 91.30669 to 91.27214 over these steps), with reward=0.0, agent_id=0, a three-branch all-False action_mask, group_id=0, group_reward=0.0, and then the printed reward 0.0 ...]\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.27214], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 1. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 1. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. 
, 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. 
, 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 1. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 1. ,\n", + " 1. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 1. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. 
, 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 1. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 1. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 1. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 1. ,\n", + " 0. , 2. , 1. , 0. , 0. , 0. 
, 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 0. ,\n", + " 2. , 1. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. , 91.2745],\n", + " dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. , 2. ,\n", + " 2. , 2. , 1. , 0. , 0. , 0. 
, 91.2745],\n", + "       dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "[... repeated output omitted: each subsequent step prints the same 'Step' header, the step tuple '(, )', and a DecisionStep whose 10x5 observation grid and 14-element vector differ only in a few entries (last value 91.2745 -> 91.30669 -> 91.26514), always with reward=0.0, agent_id=0, an all-False action_mask, group_id=0, group_reward=0.0 ...]\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + "       [0., 1., 1., 0., 0.],\n", + "       [0., 1., 1., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.],\n", + "       [0., 0., 0., 0., 0.]], dtype=float32), array([ 0.     ,  0.     ,  0.     ,  3.     ,  3.     ,  3.     ,\n", + "        1.     ,  1.     ,  2.     ,  1.     ,  0.     ,  0.     ,\n", + "        0.     
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.26514], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 1. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 0. , 1. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 1. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 2. , 2. , 1. , 0. , 0. ,\n", + " 0. , 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n", + "Step\n", + "(, )\n", + "DecisionStep(obs=[array([[0., 1., 0., 0., 0.],\n", + " [0., 1., 0., 0., 0.],\n", + " [0., 1., 1., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.],\n", + " [0., 0., 0., 0., 0.]], dtype=float32), array([ 0. , 0. , 0. , 3. , 3. , 3. ,\n", + " 2. , 0. , 2. , 1. , 0. , 0. ,\n", + " 0. 
, 91.30669], dtype=float32)], reward=0.0, agent_id=0, action_mask=[array([False]), array([False]), array([False])], group_id=0, group_reward=0.0)\n", + "0.0\n" + ] + }, + { + "ename": "UnityCommunicatorStoppedException", + "evalue": "Communicator has exited.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mUnityCommunicatorStoppedException\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [85]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m----> 2\u001b[0m \u001b[43menv\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstep\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStep\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 4\u001b[0m asd \u001b[38;5;241m=\u001b[39m env\u001b[38;5;241m.\u001b[39mget_steps(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mnpc?team=0\u001b[39m\u001b[38;5;124m'\u001b[39m)\n", + "File \u001b[0;32m~/opt/miniforge3/lib/python3.9/site-packages/mlagents_envs/timers.py:305\u001b[0m, in \u001b[0;36mtimed..wrapped\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 303\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapped\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 304\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m hierarchical_timer(func\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m):\n\u001b[0;32m--> 305\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/opt/miniforge3/lib/python3.9/site-packages/mlagents_envs/environment.py:350\u001b[0m, in \u001b[0;36mUnityEnvironment.step\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 348\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_communicator\u001b[38;5;241m.\u001b[39mexchange(step_input, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_poll_process)\n\u001b[1;32m 349\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m outputs \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 350\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m UnityCommunicatorStoppedException(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCommunicator has exited.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 351\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_update_behavior_specs(outputs)\n\u001b[1;32m 352\u001b[0m rl_output \u001b[38;5;241m=\u001b[39m outputs\u001b[38;5;241m.\u001b[39mrl_output\n", + "\u001b[0;31mUnityCommunicatorStoppedException\u001b[0m: Communicator has exited." 
+ ] + } + ], + "source": [ + "# Drive the simulation by hand: advance one step per iteration, read the\n", + "# decision steps for the 'npc?team=0' behaviour, and queue a random discrete\n", + "# action for agent 0. The loop only ends when the Unity side stops, which\n", + "# makes env.step() raise the UnityCommunicatorStoppedException shown in the\n", + "# cell output.\n", + "while True:\n", + " env.step()\n", + " print(\"Step\")\n", + " # get_steps returns a (DecisionSteps, TerminalSteps) pair for the behaviour\n", + " asd = env.get_steps('npc?team=0')\n", + " print(asd)\n", + " print(asd[0][0])\n", + " # first element of agent 0's first observation tensor\n", + " _id = asd[0][0].obs[0][0][0]\n", + " print(_id)\n", + " env.set_action_for_agent('npc?team=0', 0, mlagents_envs.environment.ActionTuple(discrete=np.array([[1, 0, random.randint(0,2)]])))" + ] + }, + { + "cell_type": "code", + "execution_count": 86, + "id": "db100c84-22ab-491b-b68d-4d5c1bbc66a3", + "metadata": {}, + "outputs": [], + "source": [ + "env.close()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}
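
For reference, a minimal sketch of how the stepping loop in this notebook could shut the environment down cleanly instead of ending on the UnityCommunicatorStoppedException captured in the cell output. It reuses the behaviour name from the notebook and assumes a Unity editor in play mode; the variable names, the reset call, and the exact import paths (mlagents_envs 2.x) are assumptions for illustration, not code from this change.

import random

import numpy as np
from mlagents_envs.base_env import ActionTuple
from mlagents_envs.environment import UnityEnvironment
from mlagents_envs.exception import UnityCommunicatorStoppedException

BEHAVIOR_NAME = "npc?team=0"  # behaviour name used by the notebook

# file_name=None attaches to an already running Unity editor instance.
env = UnityEnvironment(file_name=None)
env.reset()

try:
    while True:
        env.step()
        decision_steps, terminal_steps = env.get_steps(BEHAVIOR_NAME)
        for agent_id in decision_steps.agent_id:
            # Same action layout as the notebook: three discrete branches,
            # with the last branch sampled at random.
            action = ActionTuple(discrete=np.array([[1, 0, random.randint(0, 2)]]))
            env.set_action_for_agent(BEHAVIOR_NAME, int(agent_id), action)
except UnityCommunicatorStoppedException:
    # env.step() raises this once the Unity side shuts down, as seen above.
    print("Unity stopped communicating; leaving the loop.")
finally:
    env.close()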