Reduce the number of unexpected warnings during testing (#3050)

* Initial commit

* Remove unnecessary `warnings.simplefilters`

* Fix tests for new step API

* Fix testing

Co-authored-by: Markus28 <montcyril@gmail.com>
Mark Towers authored 2022-08-30 19:47:26 +01:00, committed by GitHub
parent 54b406b799
commit ab3e02db83
11 changed files with 133 additions and 104 deletions

View File

@@ -1,4 +1,5 @@
 import pickle
+import warnings

 import pytest
@@ -39,13 +40,13 @@ CHECK_ENV_IGNORE_WARNINGS = [
 )
 def test_envs_pass_env_checker(spec):
     """Check that all environments pass the environment checker with no warnings other than the expected."""
-    with pytest.warns(None) as warnings:
+    with warnings.catch_warnings(record=True) as caught_warnings:
         env = spec.make(disable_env_checker=True).unwrapped
         check_env(env)
         env.close()

-    for warning in warnings.list:
+    for warning in caught_warnings:
         if warning.message.args[0] not in CHECK_ENV_IGNORE_WARNINGS:
             print()
             print(warning.message.args[0])
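The change repeated throughout this commit: `pytest.warns(None)` is deprecated in pytest 7 because it never actually asserted anything, so the tests switch to the standard-library `warnings.catch_warnings(record=True)` and compare the recorded warnings against an ignore list. A minimal standalone sketch of that pattern, with an illustrative ignore list rather than the suite's real `CHECK_ENV_IGNORE_WARNINGS`:

import warnings

IGNORE_WARNINGS = ["an expected warning"]

with warnings.catch_warnings(record=True) as caught_warnings:
    # Not needed by the gym tests themselves; ensures both warnings below are
    # recorded when this sketch runs as a standalone script.
    warnings.simplefilter("always")
    warnings.warn("an expected warning")
    warnings.warn("an unexpected warning")

unexpected = [
    warning.message.args[0]
    for warning in caught_warnings
    if warning.message.args[0] not in IGNORE_WARNINGS
]
assert unexpected == ["an unexpected warning"]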

View File

@@ -1,6 +1,7 @@
 """Tests that gym.make works as expected."""
 import re
+import warnings
 from copy import deepcopy

 import numpy as np
@@ -52,6 +53,7 @@ def test_make():
 def test_make_deprecated():
+    with warnings.catch_warnings(record=True):
         with pytest.raises(
             gym.error.Error,
             match=re.escape(
@@ -133,7 +135,7 @@ def test_make_disable_env_checker():
     "spec", all_testing_env_specs, ids=[spec.id for spec in all_testing_env_specs]
 )
 def test_passive_checker_wrapper_warnings(spec):
-    with pytest.warns(None) as warnings:
+    with warnings.catch_warnings(record=True) as caught_warnings:
         env = gym.make(spec)  # disable_env_checker=False
         env.reset()
         env.step(env.action_space.sample())
@@ -141,7 +143,7 @@ def test_passive_checker_wrapper_warnings(spec):
         env.close()

-    for warning in warnings.list:
+    for warning in caught_warnings:
         if warning.message.args[0] not in PASSIVE_CHECK_IGNORE_WARNING:
             raise gym.error.Error(f"Unexpected warning: {warning.message}")
@@ -189,14 +191,14 @@ def test_make_render_mode():
     env.close()

     assert len(valid_render_modes) > 0
-    with pytest.warns(None) as warnings:
+    with warnings.catch_warnings(record=True) as caught_warnings:
         env = gym.make(
             "CartPole-v1", render_mode=valid_render_modes[0], disable_env_checker=True
         )
         assert env.render_mode == valid_render_modes[0]
         env.close()

-    for warning in warnings.list:
+    for warning in caught_warnings:
         if not re.compile(".*step API.*").match(warning.message.args[0]):
             raise gym.error.Error(f"Unexpected warning: {warning.message}")
@@ -207,7 +209,7 @@ def test_make_render_mode():
     env.close()

     with pytest.warns(
-        Warning,
+        UserWarning,
         match=re.escape(
             "You are trying to use 'human' rendering for an environment that doesn't natively support it. The HumanRendering wrapper is being applied to your environment."
         ),
@@ -228,13 +230,16 @@ def test_make_render_mode():
     )

     # Make sure that an additional error is thrown when a user tries to use the wrapper on an environment with the old API
+    with warnings.catch_warnings(record=True):
         with pytest.raises(
             gym.error.Error,
             match=re.escape(
                 "You passed render_mode='human' although test/NoHumanOldAPI-v0 doesn't implement human-rendering natively."
             ),
         ):
-            gym.make("test/NoHumanOldAPI-v0", render_mode="human", disable_env_checker=True)
+            gym.make(
+                "test/NoHumanOldAPI-v0", render_mode="human", disable_env_checker=True
+            )

     # This test ensures that the additional exception "Gym tried to apply the HumanRendering wrapper but it looks like
     # your environment is using the old rendering API" is *not* triggered by a TypeError that originates from
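Besides ignoring known warnings, the updated tests assert the warnings they do expect with `pytest.warns(UserWarning, match=...)`, narrowing the earlier bare `Warning` category. A hedged sketch against a hypothetical helper, `emit_fallback_warning`, which stands in for `gym.make(..., render_mode="human")` on an environment without native human rendering:

import re
import warnings

import pytest


def emit_fallback_warning():
    # Illustrative stand-in; only the warning matters for this sketch.
    warnings.warn(
        "The HumanRendering wrapper is being applied to your environment."
    )


def test_fallback_warning_is_expected():
    with pytest.warns(
        UserWarning,
        match=re.escape("HumanRendering wrapper is being applied"),
    ):
        emit_fallback_warning()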

View File

@@ -1,4 +1,5 @@
 import re
+import warnings

 import numpy as np
 import pytest
@@ -57,9 +58,11 @@ def test_box_shape_inference(box, expected_shape):
 )
 def test_box_values(value, valid):
     if valid:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
            Box(low=value, high=value)
-        assert len(warnings.list) == 0, tuple(warning.message for warning in warnings)
+        assert len(caught_warnings) == 0, tuple(
+            warning.message for warning in caught_warnings
+        )
     else:
         with pytest.raises(
             ValueError,

View File

@@ -47,10 +47,10 @@ from tests.testing_env import GenericTestEnv
 )
 def test_no_error_warnings(env):
     """A full version of this test with all gym envs is run in tests/envs/test_envs.py."""
-    with pytest.warns(None) as warnings:
+    with warnings.catch_warnings(record=True) as caught_warnings:
         check_env(env)
-    assert len(warnings) == 0, [warning.message for warning in warnings]
+    assert len(caught_warnings) == 0, [warning.message for warning in caught_warnings]


 def _no_super_reset(self, seed=None, options=None):

View File

@@ -1,4 +1,5 @@
 import re
+import warnings
 from typing import Dict, Union

 import numpy as np
@@ -112,10 +113,10 @@ def test_check_observation_space(test, space, message: str):
         ):
             check_observation_space(space)
     else:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
             with pytest.raises(test, match=f"^{re.escape(message)}$"):
                 check_observation_space(space)
-        assert len(warnings) == 0
+        assert len(caught_warnings) == 0


 @pytest.mark.parametrize(
@@ -181,10 +182,10 @@ def test_check_action_space(
         ):
             check_action_space(space)
     else:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
             with pytest.raises(test, match=f"^{re.escape(message)}$"):
                 check_action_space(space)
-        assert len(warnings) == 0
+        assert len(caught_warnings) == 0


 @pytest.mark.parametrize(
@@ -236,10 +237,10 @@ def test_check_obs(test, obs, obs_space: spaces.Space, message: str):
         ):
             check_obs(obs, obs_space, "testing")
     else:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
             with pytest.raises(test, match=f"^{re.escape(message)}$"):
                 check_obs(obs, obs_space, "testing")
-        assert len(warnings) == 0
+        assert len(caught_warnings) == 0


 def _reset_no_seed(self, options=None):
@@ -304,12 +305,10 @@ def test_passive_env_reset_checker(test, func: callable, message: str, kwargs: D
         ):
             env_reset_passive_checker(GenericTestEnv(reset_fn=func), **kwargs)
     else:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
             with pytest.raises(test, match=f"^{re.escape(message)}$"):
                 env_reset_passive_checker(GenericTestEnv(reset_fn=func), **kwargs)
-        for warning in warnings:
-            print(warning)
-        assert len(warnings) == 0
+        assert len(caught_warnings) == 0


 def _modified_step(
@@ -386,10 +385,10 @@ def test_passive_env_step_checker(
         ):
             env_step_passive_checker(GenericTestEnv(step_fn=func), 0)
     else:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
             with pytest.raises(test, match=f"^{re.escape(message)}$"):
                 env_step_passive_checker(GenericTestEnv(step_fn=func), 0)
-        assert len(warnings) == 0, [warning for warning in warnings.list]
+        assert len(caught_warnings) == 0, caught_warnings


 @pytest.mark.parametrize(
@@ -447,7 +446,7 @@ def test_passive_render_checker(test, env: GenericTestEnv, message: str):
         ):
             env_render_passive_checker(env)
     else:
-        with pytest.warns(None) as warnings:
+        with warnings.catch_warnings(record=True) as caught_warnings:
             with pytest.raises(test, match=f"^{re.escape(message)}$"):
                 env_render_passive_checker(env)
-        assert len(warnings) == 0
+        assert len(caught_warnings) == 0
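The hunks above repeatedly combine two assertions: the call must raise a specific error, matched exactly with an anchored, escaped regex, and it must emit no warnings at all. A minimal standalone sketch of that combination, using a toy `validate_positive` function rather than the gym checker functions:

import re
import warnings

import pytest


def validate_positive(value: int) -> None:
    # Toy stand-in for the passive-checker functions under test.
    if value <= 0:
        raise ValueError(f"Expected a positive value, got {value}")


def test_raises_without_warnings():
    message = "Expected a positive value, got -1"
    with warnings.catch_warnings(record=True) as caught_warnings:
        with pytest.raises(ValueError, match=f"^{re.escape(message)}$"):
            validate_positive(-1)
    assert len(caught_warnings) == 0, [w.message for w in caught_warnings]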

View File

@@ -1,6 +1,8 @@
 import os
 import shutil

+import numpy as np
+
 import gym
 from gym.utils.save_video import capped_cubic_video_schedule, save_video
@@ -13,8 +15,8 @@ def test_record_video_using_default_trigger():
     episode_index = 0
     for step_index in range(199):
         action = env.action_space.sample()
-        _, _, done, _ = env.step(action)
-        if done:
+        _, _, terminated, truncated, _ = env.step(action)
+        if terminated or truncated:
             save_video(
                 env.render(),
                 "videos",
@@ -51,8 +53,8 @@ def test_record_video_step_trigger():
     episode_index = 0
     for step_index in range(199):
         action = env.action_space.sample()
-        _, _, done, _ = env.step(action)
-        if done:
+        _, _, terminated, truncated, _ = env.step(action)
+        if terminated or truncated:
             save_video(
                 env.render(),
                 "videos",
@@ -76,16 +78,15 @@ def test_record_video_within_vector():
     envs = gym.vector.make(
         "CartPole-v1", num_envs=2, asynchronous=True, render_mode="rgb_array"
     )
-    envs = gym.wrappers.RecordEpisodeStatistics(envs)
     envs.reset()
     episode_frames = []
     step_starting_index = 0
     episode_index = 0
     for step_index in range(199):
-        _, _, _, infos = envs.step(envs.action_space.sample())
+        _, _, terminated, truncated, _ = envs.step(envs.action_space.sample())
         episode_frames.extend(envs.call("render")[0])
-        if "episode" in infos and infos["_episode"][0]:
+        if np.any(np.logical_or(terminated, truncated)):
             save_video(
                 episode_frames,
                 "videos",
@@ -97,6 +98,7 @@ def test_record_video_within_vector():
             episode_frames = []
             step_starting_index = step_index + 1
             episode_index += 1
+    envs.close()

     assert os.path.isdir("videos")
     mp4_files = [file for file in os.listdir("videos") if file.endswith(".mp4")]
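The save_video tests also move to the new step API, where `env.step` returns five values and the old `done` flag is split into `terminated` (a terminal state was reached) and `truncated` (the episode was cut short, for example by a time limit). A minimal rollout loop under that API, assuming a gym release with the 0.26-style API this commit targets:

import gym

env = gym.make("CartPole-v1", disable_env_checker=True)
obs, info = env.reset(seed=0)
for _ in range(200):
    action = env.action_space.sample()
    obs, reward, terminated, truncated, info = env.step(action)
    if terminated or truncated:
        obs, info = env.reset()
env.close()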

View File

@@ -1,3 +1,4 @@
+import re
 from multiprocessing import TimeoutError

 import numpy as np
@@ -178,62 +179,74 @@ def test_step_timeout_async_vector_env(shared_memory):
     env.close(terminate=True)


-@pytest.mark.filterwarnings("ignore::UserWarning")
 @pytest.mark.parametrize("shared_memory", [True, False])
 def test_reset_out_of_order_async_vector_env(shared_memory):
     env_fns = [make_env("CartPole-v1", i) for i in range(4)]

     env = AsyncVectorEnv(env_fns, shared_memory=shared_memory)
-    with pytest.raises(NoAsyncCallError):
-        try:
+    with pytest.raises(
+        NoAsyncCallError,
+        match=re.escape(
+            "Calling `reset_wait` without any prior call to `reset_async`."
+        ),
+    ):
         env.reset_wait()
-        except NoAsyncCallError as exception:
-            assert exception.name == "reset"
-            raise

     env.close(terminate=True)

     env = AsyncVectorEnv(env_fns, shared_memory=shared_memory)
-    with pytest.raises(AlreadyPendingCallError):
-        try:
+    with pytest.raises(
+        AlreadyPendingCallError,
+        match=re.escape(
+            "Calling `reset_async` while waiting for a pending call to `step` to complete"
+        ),
+    ):
         actions = env.action_space.sample()
         env.reset()
         env.step_async(actions)
         env.reset_async()
-        except NoAsyncCallError as exception:
-            assert exception.name == "step"
-            raise

+    with pytest.warns(
+        UserWarning,
+        match=re.escape(
+            "Calling `close` while waiting for a pending call to `step` to complete."
+        ),
+    ):
         env.close(terminate=True)


-@pytest.mark.filterwarnings("ignore::UserWarning")
 @pytest.mark.parametrize("shared_memory", [True, False])
 def test_step_out_of_order_async_vector_env(shared_memory):
     env_fns = [make_env("CartPole-v1", i) for i in range(4)]

     env = AsyncVectorEnv(env_fns, shared_memory=shared_memory)
-    with pytest.raises(NoAsyncCallError):
-        try:
+    with pytest.raises(
+        NoAsyncCallError,
+        match=re.escape("Calling `step_wait` without any prior call to `step_async`."),
+    ):
         env.action_space.sample()
         env.reset()
         env.step_wait()
-        except AlreadyPendingCallError as exception:
-            assert exception.name == "step"
-            raise

     env.close(terminate=True)

     env = AsyncVectorEnv(env_fns, shared_memory=shared_memory)
-    with pytest.raises(AlreadyPendingCallError):
-        try:
+    with pytest.raises(
+        AlreadyPendingCallError,
+        match=re.escape(
+            "Calling `step_async` while waiting for a pending call to `reset` to complete"
+        ),
+    ):
         actions = env.action_space.sample()
         env.reset_async()
         env.step_async(actions)
-        except AlreadyPendingCallError as exception:
-            assert exception.name == "reset"
-            raise

+    with pytest.warns(
+        UserWarning,
+        match=re.escape(
+            "Calling `close` while waiting for a pending call to `reset` to complete."
+        ),
+    ):
         env.close(terminate=True)
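Here the bare `pytest.raises(...)` plus try/except bookkeeping is replaced by `pytest.raises` with an escaped `match`, and the expected close-time `UserWarning` is asserted explicitly with `pytest.warns` rather than being filtered out. A hedged sketch with a toy `NoAsyncCallError` stand-in; it also shows how the old `exception.name` check could be kept through pytest's `ExceptionInfo`, which the diff itself drops in favour of the message match:

import re

import pytest


class NoAsyncCallError(Exception):
    # Toy stand-in for gym.error.NoAsyncCallError, which carries a `name`.
    def __init__(self, message: str, name: str):
        super().__init__(message)
        self.name = name


def reset_wait_without_reset_async():
    raise NoAsyncCallError(
        "Calling `reset_wait` without any prior call to `reset_async`.",
        name="reset",
    )


def test_out_of_order_call_raises_with_message():
    with pytest.raises(
        NoAsyncCallError,
        match=re.escape(
            "Calling `reset_wait` without any prior call to `reset_async`."
        ),
    ) as exc_info:
        reset_wait_without_reset_async()
    # Optional extra check on the exception attribute, without try/except.
    assert exc_info.value.name == "reset"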

View File

@@ -1,5 +1,4 @@
 """Tests the gym.wrapper.AutoResetWrapper operates as expected."""
 from typing import Generator, Optional
 from unittest.mock import MagicMock
@@ -64,7 +63,6 @@ def test_make_autoreset_true(spec):
     Note: This test assumes that all first-party environments will terminate in a finite
     amount of time with random actions, which is true as of the time of adding this test.
     """
-    with pytest.warns(None):
     env = gym.make(spec.id, autoreset=True, disable_env_checker=True)
     assert AutoResetWrapper in unwrap_env(env)
@@ -84,17 +82,14 @@
 )
 def test_gym_make_autoreset(spec):
     """Tests that `gym.make` autoreset wrapper is applied only when `gym.make(..., autoreset=True)`."""
-    with pytest.warns(None):
     env = gym.make(spec.id, disable_env_checker=True)
     assert AutoResetWrapper not in unwrap_env(env)
     env.close()

-    with pytest.warns(None):
     env = gym.make(spec.id, autoreset=False, disable_env_checker=True)
     assert AutoResetWrapper not in unwrap_env(env)
     env.close()

-    with pytest.warns(None):
     env = gym.make(spec.id, autoreset=True, disable_env_checker=True)
     assert AutoResetWrapper in unwrap_env(env)
     env.close()

View File

@@ -1,4 +1,5 @@
 import re
+import warnings

 import numpy as np
 import pytest
@@ -16,7 +17,7 @@ from tests.testing_env import GenericTestEnv
     ids=[env.spec.id for env in all_testing_initialised_envs],
 )
 def test_passive_checker_wrapper_warnings(env):
-    with pytest.warns(None) as warnings:
+    with warnings.catch_warnings(record=True) as caught_warnings:
         checker_env = PassiveEnvChecker(env)
         checker_env.reset()
         checker_env.step(checker_env.action_space.sample())
@@ -24,7 +25,7 @@ def test_passive_checker_wrapper_warnings(env):
         checker_env.close()

-    for warning in warnings.list:
+    for warning in caught_warnings:
         if warning.message.args[0] not in PASSIVE_CHECK_IGNORE_WARNING:
             raise gym.error.Error(f"Unexpected warning: {warning.message}")

View File

@@ -10,7 +10,7 @@ from gym.wrappers.pixel_observation import STATE_KEY, PixelObservationWrapper
 class FakeEnvironment(gym.Env):
-    def __init__(self, render_mode=None):
+    def __init__(self, render_mode="single_rgb_array"):
         self.action_space = spaces.Box(shape=(1,), low=-1, high=1, dtype=np.float32)
         self.render_mode = render_mode

View File

@@ -1,5 +1,6 @@
 import gc
 import os
+import re
 import time

 import pytest
@@ -74,14 +75,23 @@ def test_no_frames():
 def test_record_unrecordable_method():
+    with pytest.warns(
+        UserWarning,
+        match="Disabling video recorder because environment <UnrecordableEnv instance> was not initialized with any compatible video mode between `single_rgb_array` and `rgb_array`",
+    ):
         env = UnrecordableEnv()
         rec = VideoRecorder(env)
         assert not rec.enabled
         rec.close()


+@pytest.mark.filterwarnings("ignore:.*Env returned None on render.*")
 def test_record_breaking_render_method():
+    with pytest.warns(
+        UserWarning,
+        match=re.escape(
+            "Env returned None on `render()`. Disabling further rendering for video recorder by marking as disabled:"
+        ),
+    ):
         env = BrokenRecordableEnv()
         rec = VideoRecorder(env)
         rec.capture_frame()
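Where a warning is incidental to the behaviour under test, the suite now scopes the suppression to a single test with `@pytest.mark.filterwarnings` (as above) instead of letting it surface as noise or fail a warnings-as-errors run. A small sketch of that marker on a hypothetical test; `render_and_warn` is illustrative only:

import warnings

import pytest


def render_and_warn():
    # Hypothetical helper that emits an incidental warning while rendering.
    warnings.warn("Env returned None on render()")
    return None


@pytest.mark.filterwarnings("ignore:.*Env returned None on render.*")
def test_render_ignores_incidental_warning():
    assert render_and_warn() is None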