Gymnasium/examples/agents/random_agent.py
import argparse
import logging
import sys

import gym
from gym import wrappers

class RandomAgent(object):
    """The world's simplest agent!"""

    def __init__(self, action_space):
        self.action_space = action_space

    def act(self, observation, reward, done):
        return self.action_space.sample()

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=None)
    parser.add_argument('env_id', nargs='?', default='CartPole-v0', help='Select the environment to run')
    args = parser.parse_args()

    # Call `undo_logger_setup` if you want to undo Gym's logger setup
    # and configure things manually. (The default should be fine most
    # of the time.)
    gym.undo_logger_setup()
    logger = logging.getLogger()
    formatter = logging.Formatter('[%(asctime)s] %(message)s')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # You can set the level to logging.DEBUG or logging.WARN if you
    # want to change the amount of output.
    logger.setLevel(logging.INFO)
    env = gym.make(args.env_id)

    # You provide the directory to write to (it can be an existing
    # directory, including one with existing data -- all monitor files
    # will be namespaced). You can also dump to a tempdir if you'd
    # like: tempfile.mkdtemp().
    outdir = '/tmp/random-agent-results'
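    # A minimal alternative sketch, assuming you do want a throwaway directory
    # instead of the fixed path above (the prefix is just illustrative):
    #
    #     import tempfile
    #     outdir = tempfile.mkdtemp(prefix='random-agent-')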
    env = wrappers.Monitor(env, directory=outdir, force=True)
    env.seed(0)
    agent = RandomAgent(env.action_space)

    episode_count = 100
    reward = 0
    done = False

    for i in range(episode_count):
        ob = env.reset()
        while True:
            action = agent.act(ob, reward, done)
            ob, reward, done, _ = env.step(action)
            if done:
                break
            # Note there's no env.render() here. But the environment can still open a
            # window and render if asked to by the monitor: it calls
            # env.render('rgb_array') to record video. Video is not recorded every
            # episode; see capped_cubic_video_schedule for details, and the note on
            # video_callable after this loop.
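    # The recording schedule mentioned above can be overridden when constructing the
    # monitor. A sketch, assuming this gym version exposes Monitor's video_callable
    # argument (here: record every 10th episode):
    #
    #     env = wrappers.Monitor(env, directory=outdir, force=True,
    #                            video_callable=lambda episode_id: episode_id % 10 == 0)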
    # Close the env and write monitor result info to disk
    env.close()

    # Upload to the scoreboard. We could also do this from another
    # process if we wanted.
    logger.info("Successfully ran RandomAgent. Now trying to upload results to the scoreboard. If it breaks, you can always just try re-uploading the same results.")
    gym.upload(outdir)
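    # Uploading needs an OpenAI Gym API key. Assuming this gym version's upload
    # signature accepts one explicitly, a sketch would be:
    #
    #     gym.upload(outdir, api_key='YOUR_API_KEY')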