├── .gitignore
├── Environment_Details.md
├── LICENSE.md
├── README.md
├── env_driver.py
├── gym_train.py
├── images
│   └── env.png
├── requirements.txt
├── rl_env_binary
├── Linux_build
│ ├── LinuxPlayer_s.debug
│ ├── RL-agent.x86_64
│ ├── RL-agent_Data
│ │ ├── Managed
│ │ │ ├── Assembly-CSharp.dll
│ │ │ ├── Google.Protobuf.dll
│ │ │ ├── Grpc.Core.dll
│ │ │ ├── Mono.Posix.dll
│ │ │ ├── Mono.Security.dll
│ │ │ ├── System.ComponentModel.Composition.dll
│ │ │ ├── System.Configuration.dll
│ │ │ ├── System.Core.dll
│ │ │ ├── System.IO.Abstractions.TestingHelpers.dll
│ │ │ ├── System.IO.Abstractions.dll
│ │ │ ├── System.Interactive.Async.dll
│ │ │ ├── System.Runtime.Serialization.dll
│ │ │ ├── System.Runtime.dll
│ │ │ ├── System.Security.dll
│ │ │ ├── System.ServiceModel.Internals.dll
│ │ │ ├── System.Xml.dll
│ │ │ ├── System.dll
│ │ │ ├── Unity.Barracuda.BurstBLAS.dll
│ │ │ ├── Unity.Barracuda.dll
│ │ │ ├── Unity.Burst.Unsafe.dll
│ │ │ ├── Unity.Burst.dll
│ │ │ ├── Unity.ML-Agents.CommunicatorObjects.dll
│ │ │ ├── Unity.ML-Agents.dll
│ │ │ ├── Unity.Mathematics.dll
│ │ │ ├── Unity.TextMeshPro.dll
│ │ │ ├── Unity.Timeline.dll
│ │ │ ├── UnityEngine.AIModule.dll
│ │ │ ├── UnityEngine.AccessibilityModule.dll
│ │ │ ├── UnityEngine.AndroidJNIModule.dll
│ │ │ ├── UnityEngine.AnimationModule.dll
│ │ │ ├── UnityEngine.AssetBundleModule.dll
│ │ │ ├── UnityEngine.AudioModule.dll
│ │ │ ├── UnityEngine.ClothModule.dll
│ │ │ ├── UnityEngine.ClusterInputModule.dll
│ │ │ ├── UnityEngine.ClusterRendererModule.dll
│ │ │ ├── UnityEngine.CoreModule.dll
│ │ │ ├── UnityEngine.CrashReportingModule.dll
│ │ │ ├── UnityEngine.DSPGraphModule.dll
│ │ │ ├── UnityEngine.DirectorModule.dll
│ │ │ ├── UnityEngine.GameCenterModule.dll
│ │ │ ├── UnityEngine.GridModule.dll
│ │ │ ├── UnityEngine.HotReloadModule.dll
│ │ │ ├── UnityEngine.IMGUIModule.dll
│ │ │ ├── UnityEngine.ImageConversionModule.dll
│ │ │ ├── UnityEngine.InputLegacyModule.dll
│ │ │ ├── UnityEngine.InputModule.dll
│ │ │ ├── UnityEngine.JSONSerializeModule.dll
│ │ │ ├── UnityEngine.LocalizationModule.dll
│ │ │ ├── UnityEngine.ParticleSystemModule.dll
│ │ │ ├── UnityEngine.PerformanceReportingModule.dll
│ │ │ ├── UnityEngine.Physics2DModule.dll
│ │ │ ├── UnityEngine.PhysicsModule.dll
│ │ │ ├── UnityEngine.ProfilerModule.dll
│ │ │ ├── UnityEngine.ScreenCaptureModule.dll
│ │ │ ├── UnityEngine.SharedInternalsModule.dll
│ │ │ ├── UnityEngine.SpriteMaskModule.dll
│ │ │ ├── UnityEngine.SpriteShapeModule.dll
│ │ │ ├── UnityEngine.StreamingModule.dll
│ │ │ ├── UnityEngine.SubstanceModule.dll
│ │ │ ├── UnityEngine.SubsystemsModule.dll
│ │ │ ├── UnityEngine.TLSModule.dll
│ │ │ ├── UnityEngine.TerrainModule.dll
│ │ │ ├── UnityEngine.TerrainPhysicsModule.dll
│ │ │ ├── UnityEngine.TextCoreModule.dll
│ │ │ ├── UnityEngine.TextRenderingModule.dll
│ │ │ ├── UnityEngine.TilemapModule.dll
│ │ │ ├── UnityEngine.UI.dll
│ │ │ ├── UnityEngine.UIElementsModule.dll
│ │ │ ├── UnityEngine.UIModule.dll
│ │ │ ├── UnityEngine.UNETModule.dll
│ │ │ ├── UnityEngine.UmbraModule.dll
│ │ │ ├── UnityEngine.UnityAnalyticsModule.dll
│ │ │ ├── UnityEngine.UnityConnectModule.dll
│ │ │ ├── UnityEngine.UnityTestProtocolModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestAssetBundleModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestAudioModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestTextureModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestWWWModule.dll
│ │ │ ├── UnityEngine.VFXModule.dll
│ │ │ ├── UnityEngine.VRModule.dll
│ │ │ ├── UnityEngine.VehiclesModule.dll
│ │ │ ├── UnityEngine.VideoModule.dll
│ │ │ ├── UnityEngine.WindModule.dll
│ │ │ ├── UnityEngine.XRModule.dll
│ │ │ ├── UnityEngine.dll
│ │ │ └── mscorlib.dll
│ │ ├── MonoBleedingEdge
│ │ │ └── etc
│ │ │ │ └── mono
│ │ │ │ ├── 2.0
│ │ │ │ ├── Browsers
│ │ │ │ │ └── Compat.browser
│ │ │ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ │ │ ├── machine.config
│ │ │ │ ├── settings.map
│ │ │ │ └── web.config
│ │ │ │ ├── 4.0
│ │ │ │ ├── Browsers
│ │ │ │ │ └── Compat.browser
│ │ │ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ │ │ ├── machine.config
│ │ │ │ ├── settings.map
│ │ │ │ └── web.config
│ │ │ │ ├── 4.5
│ │ │ │ ├── Browsers
│ │ │ │ │ └── Compat.browser
│ │ │ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ │ │ ├── machine.config
│ │ │ │ ├── settings.map
│ │ │ │ └── web.config
│ │ │ │ ├── browscap.ini
│ │ │ │ ├── config
│ │ │ │ ├── mconfig
│ │ │ │ └── config.xml
│ │ │ │ └── registry
│ │ │ │ └── last-btime
│ │ ├── Plugins
│ │ │ └── lib_burst_generated.txt
│ │ ├── Resources
│ │ │ ├── UnityPlayer.png
│ │ │ ├── unity default resources
│ │ │ └── unity_builtin_extra
│ │ ├── app.info
│ │ ├── boot.config
│ │ ├── globalgamemanagers
│ │ ├── globalgamemanagers.assets
│ │ ├── level0
│ │ ├── level0.resS
│ │ ├── resources.assets
│ │ └── sharedassets0.assets
│ └── UnityPlayer_s.debug
├── Linux_headless_build
│ ├── LinuxPlayer_s.debug
│ ├── RL-agent.x86_64
│ ├── RL-agent_Data
│ │ ├── Managed
│ │ │ ├── Assembly-CSharp.dll
│ │ │ ├── Google.Protobuf.dll
│ │ │ ├── Grpc.Core.dll
│ │ │ ├── Mono.Posix.dll
│ │ │ ├── Mono.Security.dll
│ │ │ ├── System.ComponentModel.Composition.dll
│ │ │ ├── System.Configuration.dll
│ │ │ ├── System.Core.dll
│ │ │ ├── System.IO.Abstractions.TestingHelpers.dll
│ │ │ ├── System.IO.Abstractions.dll
│ │ │ ├── System.Interactive.Async.dll
│ │ │ ├── System.Runtime.Serialization.dll
│ │ │ ├── System.Runtime.dll
│ │ │ ├── System.Security.dll
│ │ │ ├── System.ServiceModel.Internals.dll
│ │ │ ├── System.Xml.dll
│ │ │ ├── System.dll
│ │ │ ├── Unity.Barracuda.BurstBLAS.dll
│ │ │ ├── Unity.Barracuda.dll
│ │ │ ├── Unity.Burst.Unsafe.dll
│ │ │ ├── Unity.Burst.dll
│ │ │ ├── Unity.ML-Agents.CommunicatorObjects.dll
│ │ │ ├── Unity.ML-Agents.dll
│ │ │ ├── Unity.Mathematics.dll
│ │ │ ├── Unity.TextMeshPro.dll
│ │ │ ├── Unity.Timeline.dll
│ │ │ ├── UnityEngine.AIModule.dll
│ │ │ ├── UnityEngine.AccessibilityModule.dll
│ │ │ ├── UnityEngine.AndroidJNIModule.dll
│ │ │ ├── UnityEngine.AnimationModule.dll
│ │ │ ├── UnityEngine.AssetBundleModule.dll
│ │ │ ├── UnityEngine.AudioModule.dll
│ │ │ ├── UnityEngine.ClothModule.dll
│ │ │ ├── UnityEngine.ClusterInputModule.dll
│ │ │ ├── UnityEngine.ClusterRendererModule.dll
│ │ │ ├── UnityEngine.CoreModule.dll
│ │ │ ├── UnityEngine.CrashReportingModule.dll
│ │ │ ├── UnityEngine.DSPGraphModule.dll
│ │ │ ├── UnityEngine.DirectorModule.dll
│ │ │ ├── UnityEngine.GameCenterModule.dll
│ │ │ ├── UnityEngine.GridModule.dll
│ │ │ ├── UnityEngine.HotReloadModule.dll
│ │ │ ├── UnityEngine.IMGUIModule.dll
│ │ │ ├── UnityEngine.ImageConversionModule.dll
│ │ │ ├── UnityEngine.InputLegacyModule.dll
│ │ │ ├── UnityEngine.InputModule.dll
│ │ │ ├── UnityEngine.JSONSerializeModule.dll
│ │ │ ├── UnityEngine.LocalizationModule.dll
│ │ │ ├── UnityEngine.ParticleSystemModule.dll
│ │ │ ├── UnityEngine.PerformanceReportingModule.dll
│ │ │ ├── UnityEngine.Physics2DModule.dll
│ │ │ ├── UnityEngine.PhysicsModule.dll
│ │ │ ├── UnityEngine.ProfilerModule.dll
│ │ │ ├── UnityEngine.ScreenCaptureModule.dll
│ │ │ ├── UnityEngine.SharedInternalsModule.dll
│ │ │ ├── UnityEngine.SpriteMaskModule.dll
│ │ │ ├── UnityEngine.SpriteShapeModule.dll
│ │ │ ├── UnityEngine.StreamingModule.dll
│ │ │ ├── UnityEngine.SubstanceModule.dll
│ │ │ ├── UnityEngine.SubsystemsModule.dll
│ │ │ ├── UnityEngine.TLSModule.dll
│ │ │ ├── UnityEngine.TerrainModule.dll
│ │ │ ├── UnityEngine.TerrainPhysicsModule.dll
│ │ │ ├── UnityEngine.TextCoreModule.dll
│ │ │ ├── UnityEngine.TextRenderingModule.dll
│ │ │ ├── UnityEngine.TilemapModule.dll
│ │ │ ├── UnityEngine.UI.dll
│ │ │ ├── UnityEngine.UIElementsModule.dll
│ │ │ ├── UnityEngine.UIModule.dll
│ │ │ ├── UnityEngine.UNETModule.dll
│ │ │ ├── UnityEngine.UmbraModule.dll
│ │ │ ├── UnityEngine.UnityAnalyticsModule.dll
│ │ │ ├── UnityEngine.UnityConnectModule.dll
│ │ │ ├── UnityEngine.UnityTestProtocolModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestAssetBundleModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestAudioModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestTextureModule.dll
│ │ │ ├── UnityEngine.UnityWebRequestWWWModule.dll
│ │ │ ├── UnityEngine.VFXModule.dll
│ │ │ ├── UnityEngine.VRModule.dll
│ │ │ ├── UnityEngine.VehiclesModule.dll
│ │ │ ├── UnityEngine.VideoModule.dll
│ │ │ ├── UnityEngine.WindModule.dll
│ │ │ ├── UnityEngine.XRModule.dll
│ │ │ ├── UnityEngine.dll
│ │ │ └── mscorlib.dll
│ │ ├── MonoBleedingEdge
│ │ │ └── etc
│ │ │ │ └── mono
│ │ │ │ ├── 2.0
│ │ │ │ ├── Browsers
│ │ │ │ │ └── Compat.browser
│ │ │ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ │ │ ├── machine.config
│ │ │ │ ├── settings.map
│ │ │ │ └── web.config
│ │ │ │ ├── 4.0
│ │ │ │ ├── Browsers
│ │ │ │ │ └── Compat.browser
│ │ │ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ │ │ ├── machine.config
│ │ │ │ ├── settings.map
│ │ │ │ └── web.config
│ │ │ │ ├── 4.5
│ │ │ │ ├── Browsers
│ │ │ │ │ └── Compat.browser
│ │ │ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ │ │ ├── machine.config
│ │ │ │ ├── settings.map
│ │ │ │ └── web.config
│ │ │ │ ├── browscap.ini
│ │ │ │ ├── config
│ │ │ │ ├── mconfig
│ │ │ │ └── config.xml
│ │ │ │ └── registry
│ │ │ │ └── last-btime
│ │ ├── Plugins
│ │ │ └── lib_burst_generated.txt
│ │ ├── Resources
│ │ │ ├── UnityPlayer.png
│ │ │ ├── unity default resources
│ │ │ └── unity_builtin_extra
│ │ ├── app.info
│ │ ├── boot.config
│ │ ├── globalgamemanagers
│ │ ├── globalgamemanagers.assets
│ │ ├── level0
│ │ ├── level0.resS
│ │ ├── resources.assets
│ │ └── sharedassets0.assets
│ └── UnityPlayer_s.debug
└── Windows_build
│ ├── Learning-Agents--r1.exe
│ ├── Learning-Agents--r1_Data
│ ├── ML-Agents
│ │ └── Timers
│ │ │ └── offline_stage_timers.json
│ ├── Managed
│ │ ├── Assembly-CSharp.dll
│ │ ├── Google.Protobuf.dll
│ │ ├── Grpc.Core.dll
│ │ ├── Mono.Posix.dll
│ │ ├── Mono.Security.dll
│ │ ├── System.ComponentModel.Composition.dll
│ │ ├── System.Configuration.dll
│ │ ├── System.Core.dll
│ │ ├── System.IO.Abstractions.TestingHelpers.dll
│ │ ├── System.IO.Abstractions.dll
│ │ ├── System.Interactive.Async.dll
│ │ ├── System.Runtime.Serialization.dll
│ │ ├── System.Runtime.dll
│ │ ├── System.Security.dll
│ │ ├── System.ServiceModel.Internals.dll
│ │ ├── System.Xml.dll
│ │ ├── System.dll
│ │ ├── Unity.Barracuda.BurstBLAS.dll
│ │ ├── Unity.Barracuda.dll
│ │ ├── Unity.Burst.Unsafe.dll
│ │ ├── Unity.Burst.dll
│ │ ├── Unity.ML-Agents.CommunicatorObjects.dll
│ │ ├── Unity.ML-Agents.dll
│ │ ├── Unity.Mathematics.dll
│ │ ├── Unity.TextMeshPro.dll
│ │ ├── Unity.Timeline.dll
│ │ ├── UnityEngine.AIModule.dll
│ │ ├── UnityEngine.ARModule.dll
│ │ ├── UnityEngine.AccessibilityModule.dll
│ │ ├── UnityEngine.AndroidJNIModule.dll
│ │ ├── UnityEngine.AnimationModule.dll
│ │ ├── UnityEngine.AssetBundleModule.dll
│ │ ├── UnityEngine.AudioModule.dll
│ │ ├── UnityEngine.ClothModule.dll
│ │ ├── UnityEngine.ClusterInputModule.dll
│ │ ├── UnityEngine.ClusterRendererModule.dll
│ │ ├── UnityEngine.CoreModule.dll
│ │ ├── UnityEngine.CrashReportingModule.dll
│ │ ├── UnityEngine.DSPGraphModule.dll
│ │ ├── UnityEngine.DirectorModule.dll
│ │ ├── UnityEngine.GameCenterModule.dll
│ │ ├── UnityEngine.GridModule.dll
│ │ ├── UnityEngine.HotReloadModule.dll
│ │ ├── UnityEngine.IMGUIModule.dll
│ │ ├── UnityEngine.ImageConversionModule.dll
│ │ ├── UnityEngine.InputLegacyModule.dll
│ │ ├── UnityEngine.InputModule.dll
│ │ ├── UnityEngine.JSONSerializeModule.dll
│ │ ├── UnityEngine.LocalizationModule.dll
│ │ ├── UnityEngine.ParticleSystemModule.dll
│ │ ├── UnityEngine.PerformanceReportingModule.dll
│ │ ├── UnityEngine.Physics2DModule.dll
│ │ ├── UnityEngine.PhysicsModule.dll
│ │ ├── UnityEngine.ProfilerModule.dll
│ │ ├── UnityEngine.ScreenCaptureModule.dll
│ │ ├── UnityEngine.SharedInternalsModule.dll
│ │ ├── UnityEngine.SpriteMaskModule.dll
│ │ ├── UnityEngine.SpriteShapeModule.dll
│ │ ├── UnityEngine.StreamingModule.dll
│ │ ├── UnityEngine.SubstanceModule.dll
│ │ ├── UnityEngine.SubsystemsModule.dll
│ │ ├── UnityEngine.TLSModule.dll
│ │ ├── UnityEngine.TerrainModule.dll
│ │ ├── UnityEngine.TerrainPhysicsModule.dll
│ │ ├── UnityEngine.TextCoreModule.dll
│ │ ├── UnityEngine.TextRenderingModule.dll
│ │ ├── UnityEngine.TilemapModule.dll
│ │ ├── UnityEngine.UI.dll
│ │ ├── UnityEngine.UIElementsModule.dll
│ │ ├── UnityEngine.UIModule.dll
│ │ ├── UnityEngine.UNETModule.dll
│ │ ├── UnityEngine.UmbraModule.dll
│ │ ├── UnityEngine.UnityAnalyticsModule.dll
│ │ ├── UnityEngine.UnityConnectModule.dll
│ │ ├── UnityEngine.UnityTestProtocolModule.dll
│ │ ├── UnityEngine.UnityWebRequestAssetBundleModule.dll
│ │ ├── UnityEngine.UnityWebRequestAudioModule.dll
│ │ ├── UnityEngine.UnityWebRequestModule.dll
│ │ ├── UnityEngine.UnityWebRequestTextureModule.dll
│ │ ├── UnityEngine.UnityWebRequestWWWModule.dll
│ │ ├── UnityEngine.VFXModule.dll
│ │ ├── UnityEngine.VRModule.dll
│ │ ├── UnityEngine.VehiclesModule.dll
│ │ ├── UnityEngine.VideoModule.dll
│ │ ├── UnityEngine.WindModule.dll
│ │ ├── UnityEngine.XRModule.dll
│ │ ├── UnityEngine.dll
│ │ └── mscorlib.dll
│ ├── Plugins
│ │ └── x86_64
│ │ │ ├── grpc_csharp_ext.x64.dll
│ │ │ ├── lib_burst_generated.dll
│ │ │ └── lib_burst_generated.txt
│ ├── Resources
│ │ ├── unity default resources
│ │ └── unity_builtin_extra
│ ├── app.info
│ ├── boot.config
│ ├── globalgamemanagers
│ ├── globalgamemanagers.assets
│ ├── level0
│ ├── level0.resS
│ ├── resources.assets
│ └── sharedassets0.assets
│ ├── MonoBleedingEdge
│ ├── EmbedRuntime
│ │ ├── MonoPosixHelper.dll
│ │ └── mono-2.0-bdwgc.dll
│ └── etc
│ │ └── mono
│ │ ├── 2.0
│ │ ├── Browsers
│ │ │ └── Compat.browser
│ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ ├── machine.config
│ │ ├── settings.map
│ │ └── web.config
│ │ ├── 4.0
│ │ ├── Browsers
│ │ │ └── Compat.browser
│ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ ├── machine.config
│ │ ├── settings.map
│ │ └── web.config
│ │ ├── 4.5
│ │ ├── Browsers
│ │ │ └── Compat.browser
│ │ ├── DefaultWsdlHelpGenerator.aspx
│ │ ├── machine.config
│ │ ├── settings.map
│ │ └── web.config
│ │ ├── browscap.ini
│ │ ├── config
│ │ └── mconfig
│ │ └── config.xml
│ ├── UnityCrashHandler64.exe
│ └── UnityPlayer.dll
├── statistics.py
├── test.py
└── train.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 | db.sqlite3-journal
62 |
63 | # Flask stuff:
64 | instance/
65 | .webassets-cache
66 |
67 | # Scrapy stuff:
68 | .scrapy
69 |
70 | # Sphinx documentation
71 | docs/_build/
72 |
73 | # PyBuilder
74 | target/
75 |
76 | # Jupyter Notebook
77 | .ipynb_checkpoints
78 |
79 | # IPython
80 | profile_default/
81 | ipython_config.py
82 |
83 | # pyenv
84 | .python-version
85 |
86 | # pipenv
87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
90 | # install all needed dependencies.
91 | #Pipfile.lock
92 |
93 | # celery beat schedule file
94 | celerybeat-schedule
95 |
96 | # SageMath parsed files
97 | *.sage.py
98 |
99 | # Environments
100 | .env
101 | .venv
102 | env/
103 | venv/
104 | ENV/
105 | env.bak/
106 | venv.bak/
107 |
108 | # Spyder project settings
109 | .spyderproject
110 | .spyproject
111 |
112 | # Rope project settings
113 | .ropeproject
114 |
115 | # mkdocs documentation
116 | /site
117 |
118 | # mypy
119 | .mypy_cache/
120 | .dmypy.json
121 | dmypy.json
122 |
123 | # Pyre type checker
124 | .pyre/
125 |
--------------------------------------------------------------------------------
/Environment_Details.md:
--------------------------------------------------------------------------------
1 | ## Find Flag
2 |
3 | 
4 |
5 |
6 | - Set-up: Environment where the agent needs to figure out how to find the flag in a minimum amount of time
7 | - Goal: Find the flag (destination) in a minimum amount of time
8 | - Agents: The environment contains one agent
9 | - Agent Reward Function (independent):
10 |   - +2 for finding the flag
11 |   - -0.0002 at each step
12 |   - -0.0002 when the agent crashes into the walls
13 | - Behavior Parameters:
14 |   - Vector Observation space: 54 variables corresponding to
15 |     - 52 ray-casts, each detecting one of two possible objects (flag & walls)
16 |     - 2 variables capturing the agent's x & z velocity
17 |   - Vector Action space (Continuous): Size of 2, corresponding to the agent's rotation and forward/backward movement
18 |   - Visual Observations (Optional): None
19 | - Benchmark Mean Reward: 1.84 (see the quick sanity check of this number below)
20 |
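A quick sanity check of the reward arithmetic above (a hedged sketch, not part of the original spec): assuming MaxStep = 5000, as mentioned in the driver script's comments, the per-step penalty is 1/5000 = 0.0002, so the benchmark mean reward of 1.84 corresponds to finding the flag after roughly 800 steps if wall crashes are ignored.

```python
# Hedged sketch: back-of-the-envelope reward arithmetic for the Find Flag env.
# Assumes MaxStep = 5000 (taken from the driver script's comments) and no wall crashes.
FLAG_REWARD = 2.0
MAX_STEP = 5000
STEP_PENALTY = 1.0 / MAX_STEP  # = 0.0002, matching the per-step penalty above

def episode_return(steps_to_flag: int) -> float:
    """Return of an episode in which the flag is found after `steps_to_flag` steps."""
    return FLAG_REWARD - steps_to_flag * STEP_PENALTY

print(episode_return(800))       # 1.84 -> matches the benchmark mean reward
print(-MAX_STEP * STEP_PENALTY)  # -1.0 -> worst case: flag never found within MaxStep
```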
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 |
2 | The MIT License (MIT)
3 |
4 | Copyright (c) 2020 Dhyey Thumar
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PPO Algorithm with a custom environment
2 |
3 |
4 | This repo contains an implementation of the Proximal Policy Optimization (PPO) algorithm, written with the Keras library, for a custom environment built with the Unity 3D engine.
5 |
6 |
7 |
8 | > **Important details about this repository:**
9 | > - Unity engine version used to build the environment = [2019.3.15f1](https://unity3d.com/get-unity/download/archive)
10 | > - ML-Agents branch = [release_1](https://github.com/Unity-Technologies/ml-agents/tree/release_1_branch)
11 | > - Environment binary:
12 | >   - For Windows = [Learning-Agents--r1 (.exe)](https://github.com/Dhyeythumar/PPO-algo-with-custom-Unity-environment/tree/main/rl_env_binary/Windows_build)
13 | >   - For Linux (Headless/Server build) = [RL-agent (.x86_64)](https://github.com/Dhyeythumar/PPO-algo-with-custom-Unity-environment/tree/main/rl_env_binary/Linux_headless_build)
14 | >   - For Linux (Normal build) = [RL-agent (.x86_64)](https://github.com/Dhyeythumar/PPO-algo-with-custom-Unity-environment/tree/main/rl_env_binary/Linux_build)
15 |
16 | **The Windows environment binary is used in this repo by default. But if you want to use one of the Linux environment binaries instead, change the ENV_NAME in the train.py & test.py scripts to the correct path pointing to those binaries stored [over here](https://github.com/Dhyeythumar/PPO-algo-with-custom-Unity-environment/tree/main/rl_env_binary), as sketched below.**
17 |
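For reference, here is a hedged sketch of what that change could look like. The Windows path below is the one already used in env_driver.py; the two Linux paths are assumptions based on this repo's folder layout, so adjust them if your checkout differs:

```python
# Pick the build that matches your platform (paths relative to the repo root).
ENV_NAME = "./rl_env_binary/Windows_build/Learning-Agents--r1"        # Windows (default)
# ENV_NAME = "./rl_env_binary/Linux_build/RL-agent.x86_64"            # Linux, normal build
# ENV_NAME = "./rl_env_binary/Linux_headless_build/RL-agent.x86_64"   # Linux, headless/server build
```

On Linux you may also need to mark the binary as executable first (e.g. `chmod +x RL-agent.x86_64`).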
18 |
19 | ## What’s In This Document
20 | - [Introduction](#introduction)
21 | - [Environment Specific Details](#environment-specific-details)
22 | - [Setup Instructions](#setup-instructions)
23 | - [Getting Started](#getting-started)
24 | - [Motivation and Learning](#motivation-and-learning)
25 | - [License](#license)
26 | - [Acknowledgements](#acknowledgements)
27 |
28 |
29 | ## Introduction
30 | - Check out [**this video**](https://youtu.be/4vwZNTagHsQ) to see the trained agent using the learned navigation skills to find the flag in a closed environment, which is divided into nine different segments.
31 | - If you want to see the training process of this agent, check out [**this video**](https://youtu.be/eIp36b5lBVM).
32 |
33 |
34 | ## Environment Specific Details
35 | These are some details you should know beforehand; without them, parts of the Keras implementation may be confusing because they are environment-dependent.
36 |
37 | [**Check this doc for detailed information.**](./Environment_Details.md)
38 |
39 | A small overview of the environment (a minimal API sketch follows this list):
40 | - Observation/State space: Vectorized (not image-based)
41 | - Action space: Continuous (unlike Discrete)
42 | - Action shape: (num of agents, 2). Only one agent is alive at every env step here, so the shape is (1, 2).
43 | - Reward System:
44 |   - -(1.0/MaxStep) per step (MaxStep is used to reset the env irrespective of whether the goal state is reached); the same penalty is applied when the agent crashes into the walls.
45 |   - +2 if the agent reaches the goal state.
46 |
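The sketch below shows how these spaces surface through the ML-Agents release_1 low-level Python API; it mirrors env_driver.py in this repo, and the values in the comments are the ones documented above (treat it as an illustrative snippet, not a training script):

```python
# Minimal sketch of the environment's spaces via the release_1 low-level API.
import numpy as np
from mlagents_envs.environment import UnityEnvironment

ENV_NAME = "./rl_env_binary/Windows_build/Learning-Agents--r1"  # as in env_driver.py

env = UnityEnvironment(file_name=ENV_NAME, seed=1)
env.reset()

behavior_name = env.get_behavior_names()[0]
spec = env.get_behavior_spec(behavior_name)
print(spec.observation_shapes)      # [(52,), (2,)] -> 52 ray-casts + 2 velocity values
print(spec.action_size)             # 2             -> rotation, forward/backward
print(spec.is_action_continuous())  # True

decision_steps, terminal_steps = env.get_steps(behavior_name)
n_agents = len(decision_steps)      # 1 agent alive per env step
action = np.clip(np.random.randn(n_agents, spec.action_size), -1, 1)  # shape (1, 2)
env.set_actions(behavior_name, action)
env.step()
env.close()
```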
47 |
48 | ## Setup Instructions
49 | Install the ML-Agents GitHub repo, [release_1 branch](https://github.com/Unity-Technologies/ml-agents/tree/release_1_branch); if you want to use a different branch, then modify the Python API calls used to interact with the environment accordingly.
50 |
51 | - **Clone these repos:**
52 | ```bash
53 | $ git clone --branch release_1 https://github.com/Unity-Technologies/ml-agents.git
54 |
55 | $ git clone https://github.com/Dhyeythumar/PPO-algo-with-custom-Unity-environment.git
56 | ```
57 |
58 | - **Create and activate a Python virtual environment:** _(Python version used - 3.8.x)_
59 | ```bash
60 | $ python -m venv myvenv
61 | $ myvenv\Scripts\activate
62 | ```
63 |
64 | - **Install the dependencies:** _(check the exact dependency versions in [requirements.txt](https://github.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/blob/main/requirements.txt) file)_
65 | ```bash
66 | (myvenv) $ pip install -e ./ml-agents/ml-agents-envs
67 | (myvenv) $ pip install tensorflow
68 | (myvenv) $ pip install keras
69 | (myvenv) $ pip install tensorboardX
70 | ```
71 |
72 |
73 | ## Getting Started
74 | - **Now, to start the training process, use the following commands:**
75 | ```bash
76 | (myvenv) $ cd PPO-algo-with-custom-Unity-environment
77 | (myvenv) $ python train.py
78 | ```
79 |
80 | - **Launch TensorBoard:**
81 | ```bash
82 | $ tensorboard --logdir=./training_data/summaries --port 6006
83 | ```
84 |
85 |
86 | ## Motivation and Learning
87 | [**This video**](https://youtu.be/kopoLzvh5jY) by [**OpenAI**](https://openai.com/) inspired me to develop something in the field of reinforcement learning. So for the first phase, I decided to create a simple RL agent that can learn navigation skills.
88 |
89 | After completing the first phase, I gained a much deeper knowledge of the RL domain and got answers to the following questions:
90 | - How to create custom 3D environments using the Unity engine?
91 | - How to use ML-Agents (Unity's toolkit for reinforcement learning) to train the RL agents?
92 | - And I also learned to implement the PPO algorithm using the Keras library. :smiley:
93 |
94 | **What's next?** 🤔
95 |
96 | I have started working on the next phase of this project, which will include a multi-agent environment setup, and I am also planning to increase the difficulty level. For more updates, stay tuned for the next video on my [**YouTube channel**](https://www.youtube.com/channel/UCpKizIKSk8ga_LCI3e3GUig).
97 |
98 | ## License
99 | Licensed under the [MIT License](./LICENSE.md).
100 |
101 |
102 | ## Acknowledgements
103 | 1. [Unity ML-Agents Python Low Level API](https://github.com/Unity-Technologies/ml-agents/blob/release_1_branch/docs/Python-API.md)
104 | 2. [rl-bot-football](https://github.com/ChintanTrivedi/rl-bot-football)
105 |
--------------------------------------------------------------------------------
/env_driver.py:
--------------------------------------------------------------------------------
1 | from mlagents_envs.environment import UnityEnvironment
2 | from mlagents_envs.side_channel.engine_configuration_channel import (
3 | EngineConfigurationChannel,
4 | )
5 | from mlagents_envs.exception import (
6 | UnityEnvironmentException,
7 | UnityCommunicationException,
8 | UnityCommunicatorStoppedException,
9 | )
10 | import numpy as np
11 |
12 | # Name of the Unity environment binary to launch
13 | ENV_NAME = "./rl_env_binary/Windows_build/Learning-Agents--r1"
14 |
15 | engine_config_channel = EngineConfigurationChannel()
16 | engine_config_channel.set_configuration_parameters(
17 | width=1800, height=900, time_scale=1.0
18 | )
19 | env = UnityEnvironment(
20 | file_name=ENV_NAME, seed=1, side_channels=[engine_config_channel]
21 | )
22 | env.reset() # Reset the environment
23 |
24 | # Set the default brain to work with
25 | behavior_name = env.get_behavior_names()[0]
26 | behavior_spec = env.get_behavior_spec(behavior_name)
27 | n_actions = behavior_spec.action_size # => 2
28 | state_dims = np.sum(behavior_spec.observation_shapes) # total obs => 54
29 |
30 | # --- Env Spec ---
31 | if behavior_spec.is_action_continuous():
32 | print("Action space is CONTINUOUS i.e {0, 0.1, 0.2}")
33 | else:
34 | print("Action space is DISCRETE i.e {0, 1, 2}")
35 | print(behavior_spec.discrete_action_branches)
36 |
37 | print("\nbehavior_spec.observation_shapes :: ", end="")
38 | print(behavior_spec.observation_shapes) # => [(52,), (2,)]
39 |
40 | # ----------------------------------------------------------------------
41 | # Get the state/obs of an agent
42 | step_result = env.get_steps(behavior_name)  # (DecisionSteps, TerminalSteps) tuple
43 |
44 | # Examine the state space for the first observation for the first agent
45 | print("\nAgent observation: \n{}\n".format(step_result[0].obs))
46 | # => [shape(1, 52), shape(1, 2)]
47 |
48 | # There are 2 obs vectors (Ray cast vals & velocity vals)
49 | for obs in step_result[0].obs:
50 | print(obs.shape)
51 |
52 | print(step_result[0].__dict__)
53 | print(step_result[1].__dict__) # data filled at the end of episode.
54 |
55 | try:
56 | for episode in range(10): # running for 10 episodes.
57 | print("Starting with a new episode...\n\n")
58 | env.reset()
59 | step_result = env.get_steps(behavior_name)
60 | done = False
61 | episode_rewards = 0
62 | end_episode_rewards = 0
63 | # i = 0;
64 | while not done: # running for 1 episode i.e 5000 max_steps
65 | n_agents = len(step_result[0])
66 |
67 | # if behavior_spec.is_action_continuous():
68 | action = np.random.randn(n_agents, n_actions)
69 | action = np.clip(action, -1, 1)
70 | print(action)
71 | env.set_actions(behavior_name, action)
72 | env.step()
73 |
74 | step_result = env.get_steps(behavior_name)
75 | episode_rewards += step_result[0].reward[0]
76 | end_episode_rewards += (
77 | step_result[1].reward[0] if len(step_result[1]) else 0
78 | )
79 | done = step_result[1].max_step[0] if len(step_result[1]) else False
80 | # i += 1
81 | print(
82 | "\n\nTotal reward in this episode: {} :: {}".format(
83 | episode_rewards, end_episode_rewards
84 | )
85 | )
86 |         # print(i) # prints 1000 when the 5000 max_step limit is hit (i.e. a decision is requested every 5 env steps).
87 | except (
88 | KeyboardInterrupt,
89 | UnityCommunicationException,
90 | UnityEnvironmentException,
91 | UnityCommunicatorStoppedException,
92 | ) as ex:
93 | print("-" * 100)
94 |     print("Exception has occurred !!")
95 | print("Testing of env was interrupted.")
96 | print("-" * 100)
97 | finally:
98 | print("Closing the env")
99 |
100 | env.close()
--------------------------------------------------------------------------------
/images/env.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/images/env.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/requirements.txt
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/LinuxPlayer_s.debug:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/LinuxPlayer_s.debug
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent.x86_64:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent.x86_64
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Assembly-CSharp.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Assembly-CSharp.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Google.Protobuf.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Google.Protobuf.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Grpc.Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Grpc.Core.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Mono.Posix.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Mono.Posix.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Mono.Security.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Mono.Security.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.ComponentModel.Composition.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.ComponentModel.Composition.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Configuration.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Configuration.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Core.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.IO.Abstractions.TestingHelpers.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.IO.Abstractions.TestingHelpers.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.IO.Abstractions.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.IO.Abstractions.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Interactive.Async.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Interactive.Async.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Runtime.Serialization.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Runtime.Serialization.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Runtime.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Runtime.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Security.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Security.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.ServiceModel.Internals.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.ServiceModel.Internals.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Xml.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.Xml.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/System.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Barracuda.BurstBLAS.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Barracuda.BurstBLAS.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Barracuda.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Barracuda.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Burst.Unsafe.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Burst.Unsafe.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Burst.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Burst.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.ML-Agents.CommunicatorObjects.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.ML-Agents.CommunicatorObjects.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.ML-Agents.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.ML-Agents.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Mathematics.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Mathematics.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.TextMeshPro.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.TextMeshPro.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Timeline.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/Unity.Timeline.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AccessibilityModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AccessibilityModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AndroidJNIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AndroidJNIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AnimationModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AnimationModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AssetBundleModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AssetBundleModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AudioModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.AudioModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ClothModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ClothModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ClusterInputModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ClusterInputModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ClusterRendererModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ClusterRendererModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.CoreModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.CoreModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.CrashReportingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.CrashReportingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.DSPGraphModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.DSPGraphModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.DirectorModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.DirectorModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.GameCenterModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.GameCenterModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.GridModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.GridModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.HotReloadModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.HotReloadModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.IMGUIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.IMGUIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ImageConversionModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ImageConversionModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.InputLegacyModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.InputLegacyModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.InputModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.InputModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.JSONSerializeModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.JSONSerializeModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.LocalizationModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.LocalizationModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ParticleSystemModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ParticleSystemModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.PerformanceReportingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.PerformanceReportingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.Physics2DModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.Physics2DModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.PhysicsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.PhysicsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ProfilerModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ProfilerModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ScreenCaptureModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.ScreenCaptureModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SharedInternalsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SharedInternalsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SpriteMaskModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SpriteMaskModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SpriteShapeModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SpriteShapeModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.StreamingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.StreamingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SubstanceModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SubstanceModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SubsystemsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.SubsystemsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TLSModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TLSModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TerrainModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TerrainModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TerrainPhysicsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TerrainPhysicsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TextCoreModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TextCoreModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TextRenderingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TextRenderingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TilemapModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.TilemapModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UI.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UI.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UIElementsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UIElementsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UNETModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UNETModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UmbraModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UmbraModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityAnalyticsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityAnalyticsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityConnectModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityConnectModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityTestProtocolModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityTestProtocolModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAssetBundleModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAssetBundleModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAudioModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAudioModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestTextureModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestTextureModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestWWWModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestWWWModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VFXModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VFXModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VRModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VRModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VehiclesModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VehiclesModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VideoModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.VideoModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.WindModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.WindModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.XRModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.XRModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/UnityEngine.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Managed/mscorlib.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Managed/mscorlib.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/2.0/Browsers/Compat.browser:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/2.0/Browsers/Compat.browser
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/2.0/settings.map:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/2.0/settings.map
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.0/Browsers/Compat.browser:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.0/Browsers/Compat.browser
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.0/settings.map:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.0/settings.map
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.5/Browsers/Compat.browser:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.5/Browsers/Compat.browser
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.5/settings.map:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.5/settings.map
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/browscap.ini:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/browscap.ini
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/config:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/config
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/MonoBleedingEdge/etc/mono/registry/last-btime:
--------------------------------------------------------------------------------
1 | 1590188007
2 |
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Plugins/lib_burst_generated.txt:
--------------------------------------------------------------------------------
1 | Library: lib_burst_generated
2 | --platform=Linux
3 | --backend=burst-llvm-9
4 | --target=X64_SSE2
5 | --dump=Function
6 | --float-precision=Standard
7 | --output=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Plugins\lib_burst_generated
8 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.dll
9 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\UnityEngine.UI.dll
10 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.TextMeshPro.dll
11 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Timeline.dll
12 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Mathematics.dll
13 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Burst.dll
14 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.dll
15 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Assembly-CSharp.dll
16 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.CommunicatorObjects.dll
17 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.BurstBLAS.dll
18 | --assembly-folder=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\
19 | --method=Unity.Burst.BurstCompiler+BurstCompilerHelper, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::IsBurstEnabled()--8c2be93e18276203cbd918daa2748a10
20 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoSetCSRTrampoline(System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--da352d92cabf024fc9986011d52a4537
21 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoGetCSRTrampoline()--89425a97f3f500fa810ad03f0c382542
22 | --method=Unity.Jobs.IJobParallelForExtensions+ParallelForJobStruct`1[[Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]], UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::Execute(Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob&, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|Unity.Jobs.LowLevel.Unsafe.JobRanges&, UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--ee34a7dd1b8659539d2d5ec51926763d
23 | --platform=Linux
24 | --backend=burst-llvm-9
25 | --target=AVX2
26 | --dump=Function
27 | --float-precision=Standard
28 | --output=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Plugins\lib_burst_generated
29 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.dll
30 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\UnityEngine.UI.dll
31 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.TextMeshPro.dll
32 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Timeline.dll
33 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Mathematics.dll
34 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Burst.dll
35 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.dll
36 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Assembly-CSharp.dll
37 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.CommunicatorObjects.dll
38 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.BurstBLAS.dll
39 | --assembly-folder=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\
40 | --method=Unity.Burst.BurstCompiler+BurstCompilerHelper, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::IsBurstEnabled()--8c2be93e18276203cbd918daa2748a10
41 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoSetCSRTrampoline(System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--da352d92cabf024fc9986011d52a4537
42 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoGetCSRTrampoline()--89425a97f3f500fa810ad03f0c382542
43 | --method=Unity.Jobs.IJobParallelForExtensions+ParallelForJobStruct`1[[Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]], UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::Execute(Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob&, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|Unity.Jobs.LowLevel.Unsafe.JobRanges&, UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--ee34a7dd1b8659539d2d5ec51926763d
44 |
45 |
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Resources/UnityPlayer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Resources/UnityPlayer.png
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Resources/unity default resources:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Resources/unity default resources
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/Resources/unity_builtin_extra:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/Resources/unity_builtin_extra
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/app.info:
--------------------------------------------------------------------------------
1 | DefaultCompany
2 | Learning-Agents--r1
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/boot.config:
--------------------------------------------------------------------------------
1 | gfx-enable-gfx-jobs=1
2 | gfx-enable-native-gfx-jobs=1
3 | wait-for-native-debugger=0
4 | vr-enabled=0
5 | hdr-display-enabled=0
6 |
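Note: boot.config is a plain key=value file read by the Unity player at startup. The following is a minimal sketch (not part of the repository; the helper name and path are chosen for illustration) of how such a file could be inspected from Python:

    # Hedged sketch: read a Unity boot.config (one key=value per line) into a dict.
    from pathlib import Path

    def read_boot_config(path="rl_env_binary/Linux_build/RL-agent_Data/boot.config"):
        settings = {}
        for line in Path(path).read_text().splitlines():
            line = line.strip()
            if not line or "=" not in line:
                continue  # skip blank lines and anything that isn't key=value
            key, _, value = line.partition("=")
            settings[key] = value
        return settings

    # e.g. read_boot_config().get("gfx-enable-gfx-jobs") -> "1"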
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/globalgamemanagers:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/globalgamemanagers
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/globalgamemanagers.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/globalgamemanagers.assets
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/level0:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/level0
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/level0.resS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/level0.resS
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/resources.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/resources.assets
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/RL-agent_Data/sharedassets0.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/RL-agent_Data/sharedassets0.assets
--------------------------------------------------------------------------------
/rl_env_binary/Linux_build/UnityPlayer_s.debug:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_build/UnityPlayer_s.debug
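Note: Linux_build above is the graphical player build; the Linux_headless_build that follows is the server variant typically used for training. As a minimal sketch only (assuming the mlagents_envs and gym_unity packages from the ML-Agents Release 1 era are installed; this is not the repository's own env_driver.py or gym_train.py), either binary could be loaded like this:

    # Hedged sketch: launch the bundled Unity build and wrap it in the Gym API.
    # The file path, worker_id, and flags below are assumptions for illustration.
    from mlagents_envs.environment import UnityEnvironment
    from gym_unity.envs import UnityToGymWrapper

    unity_env = UnityEnvironment(
        file_name="rl_env_binary/Linux_build/RL-agent.x86_64",  # or the headless build
        worker_id=0,        # port offset; increase when running several instances
        no_graphics=False,  # set True (or use Linux_headless_build) on a server
    )
    env = UnityToGymWrapper(unity_env)  # exposes reset()/step() as a Gym env

    obs = env.reset()
    obs, reward, done, info = env.step(env.action_space.sample())
    env.close()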
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/LinuxPlayer_s.debug:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/LinuxPlayer_s.debug
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent.x86_64:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent.x86_64
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Assembly-CSharp.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Assembly-CSharp.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Google.Protobuf.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Google.Protobuf.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Grpc.Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Grpc.Core.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Mono.Posix.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Mono.Posix.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Mono.Security.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Mono.Security.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.ComponentModel.Composition.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.ComponentModel.Composition.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Configuration.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Configuration.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Core.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.IO.Abstractions.TestingHelpers.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.IO.Abstractions.TestingHelpers.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.IO.Abstractions.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.IO.Abstractions.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Interactive.Async.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Interactive.Async.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Runtime.Serialization.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Runtime.Serialization.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Runtime.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Runtime.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Security.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Security.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.ServiceModel.Internals.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.ServiceModel.Internals.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Xml.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.Xml.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/System.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Barracuda.BurstBLAS.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Barracuda.BurstBLAS.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Barracuda.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Barracuda.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Burst.Unsafe.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Burst.Unsafe.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Burst.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Burst.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.ML-Agents.CommunicatorObjects.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.ML-Agents.CommunicatorObjects.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.ML-Agents.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.ML-Agents.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Mathematics.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Mathematics.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.TextMeshPro.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.TextMeshPro.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Timeline.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/Unity.Timeline.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AccessibilityModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AccessibilityModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AndroidJNIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AndroidJNIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AnimationModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AnimationModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AssetBundleModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AssetBundleModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AudioModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.AudioModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ClothModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ClothModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ClusterInputModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ClusterInputModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ClusterRendererModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ClusterRendererModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.CoreModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.CoreModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.CrashReportingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.CrashReportingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.DSPGraphModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.DSPGraphModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.DirectorModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.DirectorModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.GameCenterModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.GameCenterModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.GridModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.GridModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.HotReloadModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.HotReloadModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.IMGUIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.IMGUIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ImageConversionModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ImageConversionModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.InputLegacyModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.InputLegacyModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.InputModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.InputModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.JSONSerializeModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.JSONSerializeModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.LocalizationModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.LocalizationModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ParticleSystemModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ParticleSystemModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.PerformanceReportingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.PerformanceReportingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.Physics2DModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.Physics2DModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.PhysicsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.PhysicsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ProfilerModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ProfilerModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ScreenCaptureModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.ScreenCaptureModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SharedInternalsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SharedInternalsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SpriteMaskModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SpriteMaskModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SpriteShapeModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SpriteShapeModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.StreamingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.StreamingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SubstanceModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SubstanceModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SubsystemsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.SubsystemsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TLSModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TLSModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TerrainModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TerrainModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TerrainPhysicsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TerrainPhysicsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TextCoreModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TextCoreModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TextRenderingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TextRenderingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TilemapModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.TilemapModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UI.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UI.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UIElementsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UIElementsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UNETModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UNETModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UmbraModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UmbraModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityAnalyticsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityAnalyticsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityConnectModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityConnectModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityTestProtocolModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityTestProtocolModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAssetBundleModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAssetBundleModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAudioModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestAudioModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestTextureModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestTextureModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestWWWModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.UnityWebRequestWWWModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VFXModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VFXModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VRModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VRModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VehiclesModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VehiclesModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VideoModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.VideoModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.WindModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.WindModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.XRModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.XRModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/UnityEngine.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/mscorlib.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Managed/mscorlib.dll
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/2.0/Browsers/Compat.browser:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/2.0/settings.map:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.0/Browsers/Compat.browser:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.0/settings.map:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.5/Browsers/Compat.browser:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/4.5/settings.map:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/browscap.ini:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/browscap.ini
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/config:
--------------------------------------------------------------------------------
(XML content not rendered in this export)
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/MonoBleedingEdge/etc/mono/registry/last-btime:
--------------------------------------------------------------------------------
1 | 1590188007
2 |
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Plugins/lib_burst_generated.txt:
--------------------------------------------------------------------------------
1 | Library: lib_burst_generated
2 | --platform=Linux
3 | --backend=burst-llvm-9
4 | --target=X64_SSE2
5 | --dump=Function
6 | --float-precision=Standard
7 | --output=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Plugins\lib_burst_generated
8 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.dll
9 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\UnityEngine.UI.dll
10 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.TextMeshPro.dll
11 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Timeline.dll
12 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Mathematics.dll
13 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Burst.dll
14 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.dll
15 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Assembly-CSharp.dll
16 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.CommunicatorObjects.dll
17 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.BurstBLAS.dll
18 | --assembly-folder=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\
19 | --method=Unity.Burst.BurstCompiler+BurstCompilerHelper, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::IsBurstEnabled()--8c2be93e18276203cbd918daa2748a10
20 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoSetCSRTrampoline(System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--da352d92cabf024fc9986011d52a4537
21 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoGetCSRTrampoline()--89425a97f3f500fa810ad03f0c382542
22 | --method=Unity.Jobs.IJobParallelForExtensions+ParallelForJobStruct`1[[Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]], UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::Execute(Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob&, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|Unity.Jobs.LowLevel.Unsafe.JobRanges&, UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--ee34a7dd1b8659539d2d5ec51926763d
23 | --platform=Linux
24 | --backend=burst-llvm-9
25 | --target=AVX2
26 | --dump=Function
27 | --float-precision=Standard
28 | --output=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Plugins\lib_burst_generated
29 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.dll
30 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\UnityEngine.UI.dll
31 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.TextMeshPro.dll
32 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Timeline.dll
33 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Mathematics.dll
34 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Burst.dll
35 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.dll
36 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Assembly-CSharp.dll
37 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.CommunicatorObjects.dll
38 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.BurstBLAS.dll
39 | --assembly-folder=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\
40 | --method=Unity.Burst.BurstCompiler+BurstCompilerHelper, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::IsBurstEnabled()--8c2be93e18276203cbd918daa2748a10
41 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoSetCSRTrampoline(System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--da352d92cabf024fc9986011d52a4537
42 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoGetCSRTrampoline()--89425a97f3f500fa810ad03f0c382542
43 | --method=Unity.Jobs.IJobParallelForExtensions+ParallelForJobStruct`1[[Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]], UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::Execute(Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob&, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|Unity.Jobs.LowLevel.Unsafe.JobRanges&, UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--ee34a7dd1b8659539d2d5ec51926763d
44 |
45 |
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Resources/UnityPlayer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Resources/UnityPlayer.png
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Resources/unity default resources:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Resources/unity default resources
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/Resources/unity_builtin_extra:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/Resources/unity_builtin_extra
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/app.info:
--------------------------------------------------------------------------------
1 | DefaultCompany
2 | Learning-Agents--r1
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/boot.config:
--------------------------------------------------------------------------------
1 | gfx-enable-gfx-jobs=1
2 | gfx-enable-native-gfx-jobs=1
3 | wait-for-native-debugger=0
4 | vr-enabled=0
5 | hdr-display-enabled=0
6 | headless=
7 |
--------------------------------------------------------------------------------
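Note: boot.config above is a flat key=value file read by the Unity player at startup. A minimal sketch (Python, standard library only; the relative path assumes a local checkout of this repository) for loading those flags into a dict:

    import pathlib

    # A minimal sketch, assuming a local checkout of this repository: parse the
    # flat key=value format of boot.config. Empty values (e.g. "headless=") are
    # preserved as empty strings.
    BOOT_CONFIG = pathlib.Path("rl_env_binary/Linux_headless_build/RL-agent_Data/boot.config")

    settings = {}
    for raw_line in BOOT_CONFIG.read_text().splitlines():
        line = raw_line.strip()
        if not line or "=" not in line:
            continue  # skip blank/trailing lines
        key, _, value = line.partition("=")
        settings[key] = value

    print(settings)  # e.g. {'gfx-enable-gfx-jobs': '1', ..., 'headless': ''}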
/rl_env_binary/Linux_headless_build/RL-agent_Data/globalgamemanagers:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/globalgamemanagers
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/globalgamemanagers.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/globalgamemanagers.assets
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/level0:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/level0
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/level0.resS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/level0.resS
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/resources.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/resources.assets
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/RL-agent_Data/sharedassets0.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/RL-agent_Data/sharedassets0.assets
--------------------------------------------------------------------------------
/rl_env_binary/Linux_headless_build/UnityPlayer_s.debug:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Linux_headless_build/UnityPlayer_s.debug
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1.exe
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/ML-Agents/Timers/offline_stage_timers.json:
--------------------------------------------------------------------------------
1 | {"count":1,"self":13.423356799999999,"total":274.1704595,"children":{"AgentSendState":{"count":11788,"self":0.067023699999999992,"total":0.3537933,"children":{"CollectObservations":{"count":2361,"self":0.002002,"total":0.002002,"children":null},"CollectDiscreteActionMasks":{"count":2361,"self":0,"total":0,"children":null},"AgentInfo.ToProto":{"count":2361,"self":0.0238768,"total":0.2847676,"children":{"GenerateSensorData":{"count":2361,"self":0.0355881,"total":0.2608908,"children":{"RayPerceptionSensor.Perceive":{"count":2361,"self":0.2253027,"total":0.2253027,"children":null}}}}}}},"DecideAction":{"count":11788,"self":259.8618624,"total":259.8618674,"children":null},"AgentAct":{"count":11788,"self":0.5104168,"total":0.5104168,"children":{"AgentInfo.ToProto":{"count":2,"self":0,"total":0,"children":{"GenerateSensorData":{"count":2,"self":0,"total":0,"children":{"RayPerceptionSensor.Perceive":{"count":2,"self":0,"total":0,"children":null}}}}}}}},"gauges":{"FindFlag.CumulativeReward":{"count":2,"max":-1.00134146,"min":-1.01734519,"runningAverage":-1.00934339,"value":-1.01734519,"weightedAverage":-1.00534236}},"metadata":{"timer_format_version":"0.1.0","start_time_seconds":"1601096873","unity_version":"2019.3.15f1","command_line_arguments":"C:\\Users\\dhyey\\Desktop\\Train-ml-agents\\Offline_training\\.\/build_file\/Learning-Agents--r1.exe --mlagents-port 5005","communication_protocol_version":"1.0.0","com.unity.ml-agents_version":"1.0.0-preview","scene_name":"offline_stage","end_time_seconds":"1601097146"}}
--------------------------------------------------------------------------------
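Note: the ML-Agents timer dump above is a single JSON object whose nodes carry count/self/total plus nested children, alongside gauges such as FindFlag.CumulativeReward. A minimal sketch (Python, standard library only; the relative path assumes a local checkout of this repository) to summarize it:

    import json

    # A minimal sketch, assuming a local checkout of this repository.
    TIMERS = "rl_env_binary/Windows_build/Learning-Agents--r1_Data/ML-Agents/Timers/offline_stage_timers.json"

    with open(TIMERS) as fh:
        timers = json.load(fh)

    # Root node and its immediate children: wall-clock totals in seconds.
    print(f"root: total={timers['total']:.2f}s  count={timers['count']}")
    for name, node in (timers.get("children") or {}).items():
        print(f"  {name}: total={node['total']:.3f}s  count={node['count']}")

    # Gauges record per-episode statistics such as the cumulative reward.
    for gauge, stats in (timers.get("gauges") or {}).items():
        print(f"gauge {gauge}: running avg={stats['runningAverage']:.3f} "
              f"(min={stats['min']:.3f}, max={stats['max']:.3f})")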
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Assembly-CSharp.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Assembly-CSharp.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Google.Protobuf.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Google.Protobuf.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Grpc.Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Grpc.Core.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Mono.Posix.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Mono.Posix.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Mono.Security.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Mono.Security.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.ComponentModel.Composition.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.ComponentModel.Composition.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Configuration.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Configuration.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Core.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.IO.Abstractions.TestingHelpers.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.IO.Abstractions.TestingHelpers.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.IO.Abstractions.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.IO.Abstractions.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Interactive.Async.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Interactive.Async.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Runtime.Serialization.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Runtime.Serialization.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Runtime.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Runtime.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Security.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Security.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.ServiceModel.Internals.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.ServiceModel.Internals.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Xml.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.Xml.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/System.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Barracuda.BurstBLAS.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Barracuda.BurstBLAS.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Barracuda.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Barracuda.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Burst.Unsafe.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Burst.Unsafe.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Burst.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Burst.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.ML-Agents.CommunicatorObjects.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.ML-Agents.CommunicatorObjects.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.ML-Agents.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.ML-Agents.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Mathematics.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Mathematics.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.TextMeshPro.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.TextMeshPro.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Timeline.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/Unity.Timeline.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ARModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ARModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AccessibilityModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AccessibilityModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AndroidJNIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AndroidJNIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AnimationModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AnimationModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AssetBundleModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AssetBundleModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AudioModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.AudioModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ClothModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ClothModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ClusterInputModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ClusterInputModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ClusterRendererModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ClusterRendererModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.CoreModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.CoreModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.CrashReportingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.CrashReportingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.DSPGraphModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.DSPGraphModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.DirectorModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.DirectorModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.GameCenterModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.GameCenterModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.GridModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.GridModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.HotReloadModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.HotReloadModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.IMGUIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.IMGUIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ImageConversionModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ImageConversionModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.InputLegacyModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.InputLegacyModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.InputModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.InputModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.JSONSerializeModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.JSONSerializeModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.LocalizationModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.LocalizationModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ParticleSystemModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ParticleSystemModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.PerformanceReportingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.PerformanceReportingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.Physics2DModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.Physics2DModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.PhysicsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.PhysicsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ProfilerModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ProfilerModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ScreenCaptureModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.ScreenCaptureModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SharedInternalsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SharedInternalsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SpriteMaskModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SpriteMaskModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SpriteShapeModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SpriteShapeModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.StreamingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.StreamingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SubstanceModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SubstanceModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SubsystemsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.SubsystemsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TLSModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TLSModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TerrainModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TerrainModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TerrainPhysicsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TerrainPhysicsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TextCoreModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TextCoreModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TextRenderingModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TextRenderingModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TilemapModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.TilemapModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UI.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UI.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UIElementsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UIElementsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UIModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UIModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UNETModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UNETModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UmbraModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UmbraModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityAnalyticsModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityAnalyticsModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityConnectModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityConnectModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityTestProtocolModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityTestProtocolModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestAssetBundleModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestAssetBundleModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestAudioModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestAudioModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestTextureModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestTextureModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestWWWModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.UnityWebRequestWWWModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VFXModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VFXModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VRModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VRModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VehiclesModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VehiclesModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VideoModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.VideoModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.WindModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.WindModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.XRModule.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.XRModule.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/UnityEngine.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/mscorlib.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Managed/mscorlib.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Plugins/x86_64/grpc_csharp_ext.x64.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Plugins/x86_64/grpc_csharp_ext.x64.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Plugins/x86_64/lib_burst_generated.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Plugins/x86_64/lib_burst_generated.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Plugins/x86_64/lib_burst_generated.txt:
--------------------------------------------------------------------------------
1 | Library: lib_burst_generated
2 | --platform=Windows
3 | --backend=burst-llvm-9
4 | --target=X64_SSE2
5 | --dump=Function
6 | --float-precision=Standard
7 | --format=Coff
8 | --output=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Plugins\x86_64\lib_burst_generated
9 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.dll
10 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\UnityEngine.UI.dll
11 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.TextMeshPro.dll
12 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Timeline.dll
13 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Mathematics.dll
14 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Burst.dll
15 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.dll
16 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Assembly-CSharp.dll
17 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.CommunicatorObjects.dll
18 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.BurstBLAS.dll
19 | --assembly-folder=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\
20 | --method=Unity.Burst.BurstCompiler+BurstCompilerHelper, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::IsBurstEnabled()--8c2be93e18276203cbd918daa2748a10
21 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoSetCSRTrampoline(System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--da352d92cabf024fc9986011d52a4537
22 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoGetCSRTrampoline()--89425a97f3f500fa810ad03f0c382542
23 | --method=Unity.Jobs.IJobParallelForExtensions+ParallelForJobStruct`1[[Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]], UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::Execute(Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob&, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|Unity.Jobs.LowLevel.Unsafe.JobRanges&, UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--ee34a7dd1b8659539d2d5ec51926763d
24 | --platform=Windows
25 | --backend=burst-llvm-9
26 | --target=AVX2
27 | --dump=Function
28 | --float-precision=Standard
29 | --format=Coff
30 | --output=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Plugins\x86_64\lib_burst_generated
31 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.dll
32 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\UnityEngine.UI.dll
33 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.TextMeshPro.dll
34 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Timeline.dll
35 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Mathematics.dll
36 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Burst.dll
37 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.dll
38 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Assembly-CSharp.dll
39 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.ML-Agents.CommunicatorObjects.dll
40 | --root-assembly=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\Unity.Barracuda.BurstBLAS.dll
41 | --assembly-folder=C:\Users\dhyey\Desktop\Unity_games\7- ML-Agents with TF\Learning-Agents--r1\Temp\StagingArea\Data\Managed\
42 | --method=Unity.Burst.BurstCompiler+BurstCompilerHelper, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::IsBurstEnabled()--8c2be93e18276203cbd918daa2748a10
43 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoSetCSRTrampoline(System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--da352d92cabf024fc9986011d52a4537
44 | --method=Unity.Burst.Intrinsics.X86, Unity.Burst, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::DoGetCSRTrampoline()--89425a97f3f500fa810ad03f0c382542
45 | --method=Unity.Jobs.IJobParallelForExtensions+ParallelForJobStruct`1[[Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null]], UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null::Execute(Unity.Barracuda.UnsafeMatrixBlockMultiplyUnrolled8xhJob&, Unity.Barracuda.BurstBLAS, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|System.IntPtr, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089|Unity.Jobs.LowLevel.Unsafe.JobRanges&, UnityEngine.CoreModule, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null|System.Int32, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089)--ee34a7dd1b8659539d2d5ec51926763d
46 |
47 |
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Resources/unity default resources:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Resources/unity default resources
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Resources/unity_builtin_extra:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/Resources/unity_builtin_extra
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/app.info:
--------------------------------------------------------------------------------
1 | DefaultCompany
2 | Learning-Agents--r1
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/boot.config:
--------------------------------------------------------------------------------
1 | gfx-enable-gfx-jobs=1
2 | gfx-enable-native-gfx-jobs=1
3 | wait-for-native-debugger=0
4 | vr-enabled=0
5 | hdr-display-enabled=0
6 |
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/globalgamemanagers:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/globalgamemanagers
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/globalgamemanagers.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/globalgamemanagers.assets
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/level0:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/level0
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/level0.resS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/level0.resS
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/resources.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/resources.assets
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/Learning-Agents--r1_Data/sharedassets0.assets:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/Learning-Agents--r1_Data/sharedassets0.assets
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/EmbedRuntime/MonoPosixHelper.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/MonoBleedingEdge/EmbedRuntime/MonoPosixHelper.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/EmbedRuntime/mono-2.0-bdwgc.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/MonoBleedingEdge/EmbedRuntime/mono-2.0-bdwgc.dll
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/2.0/Browsers/Compat.browser:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/2.0/settings.map:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/4.0/Browsers/Compat.browser:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/4.0/settings.map:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/4.5/Browsers/Compat.browser:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/4.5/settings.map:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/browscap.ini:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/browscap.ini
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/MonoBleedingEdge/etc/mono/config:
--------------------------------------------------------------------------------
(XML contents not captured in this dump)
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/UnityCrashHandler64.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/UnityCrashHandler64.exe
--------------------------------------------------------------------------------
/rl_env_binary/Windows_build/UnityPlayer.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dhyeythumar/PPO-algo-with-custom-Unity-environment/887ebeae522cb7b374a42c8d2825de04272e7c7e/rl_env_binary/Windows_build/UnityPlayer.dll
--------------------------------------------------------------------------------
/statistics.py:
--------------------------------------------------------------------------------
1 | from tensorboardX import SummaryWriter
2 | import numpy as np
3 |
4 |
5 | class Memory:
6 | """
7 | This class stores the data used for the TensorBoard summary and terminal logs.
8 |
9 | The number of stored entries equals the SUMMARY_FREQ used while training.
10 | Data of length BUFFER_SIZE is reduced to a single value before being stored in this class.
11 | """
12 |
13 | def __init__(self, RUN_ID):
14 | self.base_tb_dir = "./training_data/summaries/" + RUN_ID
15 | self.writer = SummaryWriter(self.base_tb_dir)
16 |
17 | # lists to store data length = SUMMARY_FREQ
18 | self.rewards = []
19 | self.episode_lens = []
20 | self.actor_losses = []
21 | self.critic_losses = []
22 | self.advantages = []
23 | self.actor_lrs = [] # actor learning rate
24 | self.critic_lrs = [] # critic learning rate
25 |
26 | def add_data(
27 | self,
28 | reward,
29 | episode_len,
30 | actor_loss,
31 | critic_loss,
32 | advantage,
33 | actor_lr,
34 | critic_lr,
35 | ):
36 | """Add data for tensorboard and terminal logging."""
37 | self.rewards.append(reward)
38 | self.episode_lens.append(episode_len)
39 | self.actor_losses.append(actor_loss)
40 | self.critic_losses.append(critic_loss)
41 | self.advantages.append(advantage)
42 | self.actor_lrs.append(actor_lr)
43 | self.critic_lrs.append(critic_lr)
44 |
45 | def clear_memory(self):
46 | """Clear the collected data."""
47 | self.rewards.clear()
48 | self.episode_lens.clear()
49 | self.actor_losses.clear()
50 | self.critic_losses.clear()
51 | self.advantages.clear()
52 | self.actor_lrs.clear()
53 | self.critic_lrs.clear()
54 |
55 | def terminal_logs(self, step):
56 | """Display logs on terminal."""
57 | if len(self.rewards) == 0:
58 | self.rewards.append(0)
59 |
60 | print(
61 | "[INFO]\tSteps: {}\tMean Reward: {:0.3f}\tStd of Reward: {:0.3f}".format(
62 | step, np.mean(self.rewards), np.std(self.rewards)
63 | )
64 | )
65 |
66 | def tensorboard_logs(self, step):
67 | """Store the logs for tensorboard vis."""
68 | self.writer.add_scalar(
69 | "Environment/Cumulative_reward", np.mean(self.rewards), step
70 | )
71 | self.writer.add_scalar(
72 | "Environment/Episode_length", np.mean(self.episode_lens), step
73 | )
74 |
75 | self.writer.add_scalar(
76 | "Learning_rate/Actor_model", np.mean(self.actor_lrs), step
77 | )
78 | self.writer.add_scalar(
79 | "Learning_rate/Critic_model", np.mean(self.critic_lrs), step
80 | )
81 |
82 | self.writer.add_scalar("Loss/Policy_loss", np.mean(self.actor_losses), step)
83 | self.writer.add_scalar("Loss/Value_loss", np.mean(self.critic_losses), step)
84 |
85 | self.writer.add_scalar("Policy/Value_estimate", np.mean(self.advantages), step)
86 |
87 | self.clear_memory()
88 |
--------------------------------------------------------------------------------
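A minimal usage sketch for the Memory class above (not part of the repository): it assumes a hypothetical training loop in which SUMMARY_FREQ controls how often the accumulated statistics are flushed to the terminal and TensorBoard; all numeric values below are placeholders.

# Hypothetical usage of the Memory class from statistics.py (placeholder values).
# Note: the local statistics.py shadows Python's stdlib `statistics` module when
# the script is run from the repository root.
from statistics import Memory

SUMMARY_FREQ = 1000  # assumed flush interval; not taken from the repository
memory = Memory("train-1")

for step in range(1, 10_001):
    # ... collect experience and run a PPO update here, then record its stats ...
    memory.add_data(
        reward=0.0,       # cumulative reward of the last finished episode
        episode_len=0,    # length of that episode
        actor_loss=0.0,   # policy loss from the last update
        critic_loss=0.0,  # value loss from the last update
        advantage=0.0,    # mean advantage estimate
        actor_lr=3e-4,    # current actor learning rate
        critic_lr=1e-3,   # current critic learning rate
    )
    if step % SUMMARY_FREQ == 0:
        memory.terminal_logs(step)
        memory.tensorboard_logs(step)  # writes the means and clears the stored lists
--------------------------------------------------------------------------------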
/test.py:
--------------------------------------------------------------------------------
1 | from mlagents_envs.environment import UnityEnvironment
2 | from mlagents_envs.side_channel.engine_configuration_channel import (
3 | EngineConfigurationChannel,
4 | )
5 | from mlagents_envs.exception import (
6 | UnityEnvironmentException,
7 | UnityCommunicationException,
8 | UnityCommunicatorStoppedException,
9 | )
10 | from pathlib import Path
11 | import numpy as np
12 | from typing import Tuple
13 | from keras.models import load_model
14 |
15 | # import keras.backend as K
16 | # import tensorflow as tf
17 |
18 |
19 | # Name of the Unity environment binary to be launched
20 | ENV_NAME = "./rl_env_binary/Windows_build/Learning-Agents--r1"
21 | RUN_ID = "train-1"
22 |
23 |
24 | class Test_FindflagAgent:
25 | def __init__(self, env: UnityEnvironment):
26 | MODEL_NAME = self.get_model_name()
27 | self.env = env
28 | self.env.reset() # the env must be reset before behavior specs are available
29 | self.behavior_name = self.env.get_behavior_names()[0]
30 | self.behavior_spec = self.env.get_behavior_spec(self.behavior_name)
31 | self.state_dims = np.sum(self.behavior_spec.observation_shapes)
32 | self.n_actions = self.behavior_spec.action_size
33 |
34 | self.actor = load_model(
35 | MODEL_NAME, custom_objects={"loss": "categorical_hinge"}
36 | )
37 |
38 | def get_model_name(self) -> str:
39 | """Get the latest saved actor model name."""
40 | _dir = "./training_data/model/" + RUN_ID
41 | basepath = Path(_dir)
42 | files_in_basepath = (entry for entry in basepath.iterdir() if entry.is_file())
43 |
44 | for item in files_in_basepath:
45 | if item.name.find("actor") != -1:
46 | name = _dir + "/" + item.name
47 | print("-" * 100)
48 | print("\t\tUsing {} saved model for testing.".format(name))
49 | print("-" * 100)
50 | return name
51 |
52 | def check_done(self, step_result) -> bool:
53 | """Return the done status for env reset."""
54 | if len(step_result[1]) != 0:
55 | return True
56 | else:
57 | return False
58 |
59 | def step(self, action: np.ndarray) -> Tuple[np.ndarray, np.float64, bool]:
60 | """
61 | Apply the actions to the env, step the env, and return a new set of experience.
62 |
63 | Return the next_obs, reward, and done response of the env.
64 | """
65 | self.env.set_actions(self.behavior_name, action)
66 | self.env.step()
67 | step_result = self.env.get_steps(self.behavior_name)
68 | done = self.check_done(step_result)
69 | next_obs = np.array([]) # store next observations
70 |
71 | if not done:
72 | for obs in step_result[0].obs:
73 | next_obs = np.append(next_obs, obs) # shape(54,)
74 | reward = step_result[0].reward[0]
75 | else:
76 | for obs in step_result[1].obs:
77 | next_obs = np.append(next_obs, obs)
78 | reward = step_result[1].reward[0]
79 | return next_obs, reward, done
80 |
81 | def get_action(self, action_probs: np.ndarray) -> np.ndarray:
82 | """Get actions from action probablities."""
83 | n_agents = 1 # only 1 agent is used in the env.
84 |
85 | action = action_probs[0]
86 | action = np.clip(action, -1, 1) # clamp to the expected action range [-1, 1]
87 | return np.reshape(action, (n_agents, self.n_actions))
88 |
89 | def test(self) -> None:
90 | """Test the trained Actor model."""
91 | self.env.reset()
92 | score = 0
93 | step_result = self.env.get_steps(self.behavior_name)
94 | observation = np.array([])
95 | for obs in step_result[0].obs:
96 | observation = np.append(observation, obs)
97 |
98 | try:
99 | while True:
100 | observation = np.expand_dims(observation, axis=0) # shape(1, 54)
101 | action_probs = self.actor.predict(observation, steps=1) # (1, 2)
102 | action = self.get_action(action_probs)
103 | next_obs, reward, done = self.step(action)
104 | observation = next_obs
105 | score += reward
106 | if done:
107 | print("Score :: ", score)
108 | score = 0
109 | self.env.reset()
110 | except (
111 | KeyboardInterrupt,
112 | UnityCommunicationException,
113 | UnityEnvironmentException,
114 | UnityCommunicatorStoppedException,
115 | ) as ex:
116 | print("-" * 100)
117 | print("\t\tException has occured !!\tTesting was interrupted.")
118 | print("-" * 100)
119 | self.env.close()
120 |
121 |
122 | if __name__ == "__main__":
123 | engine_config_channel = EngineConfigurationChannel()
124 | engine_config_channel.set_configuration_parameters(
125 | width=1800, height=900, time_scale=1.0
126 | )
127 |
128 | env = UnityEnvironment(
129 | file_name=ENV_NAME, seed=0, side_channels=[engine_config_channel]
130 | )
131 |
132 | agent = Test_FindflagAgent(env)
133 | agent.test()
134 |
--------------------------------------------------------------------------------
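For context on the tuple indexing used in check_done() and step() above: in this mlagents_envs release, env.get_steps(behavior_name) returns a (DecisionSteps, TerminalSteps) pair, so a non-empty second element means the episode has ended. The sketch below (not part of the repository; the function name read_step is illustrative) restates the same extraction with named variables, assuming that API:

# Illustrative, more explicit form of the (next_obs, reward, done) extraction
# performed by Test_FindflagAgent.step(). Not part of the repository.
from typing import Tuple

import numpy as np
from mlagents_envs.environment import UnityEnvironment


def read_step(env: UnityEnvironment, behavior_name: str) -> Tuple[np.ndarray, float, bool]:
    """Assumes get_steps() returns a (DecisionSteps, TerminalSteps) pair."""
    decision_steps, terminal_steps = env.get_steps(behavior_name)
    done = len(terminal_steps) > 0  # an agent present in TerminalSteps => episode ended
    active = terminal_steps if done else decision_steps
    # Flatten and concatenate every observation array for the single agent,
    # mirroring the np.append() loop in test.py (shape (54,) for this env).
    next_obs = np.concatenate([np.asarray(obs).flatten() for obs in active.obs])
    reward = float(active.reward[0])
    return next_obs, reward, done
--------------------------------------------------------------------------------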