Merge pull request Significant-Gravitas#1609 from younessZMZ/branch1
Adjust test_prompt_generator and add test report generation
richbeales authored Apr 15, 2023
2 parents c11c5d5 + 02d75ce commit 9eb138f
Showing 2 changed files with 50 additions and 23 deletions.
14 changes: 13 additions & 1 deletion tests.py
@@ -1,8 +1,20 @@
 import unittest
+import coverage
 
 if __name__ == "__main__":
+    # Start coverage collection
+    cov = coverage.Coverage()
+    cov.start()
+
     # Load all tests from the 'autogpt/tests' package
-    suite = unittest.defaultTestLoader.discover("autogpt/tests")
+    suite = unittest.defaultTestLoader.discover("./tests")
 
     # Run the tests
     unittest.TextTestRunner().run(suite)
+
+    # Stop coverage collection
+    cov.stop()
+    cov.save()
+
+    # Report the coverage
+    cov.report(show_missing=True)
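A possible refinement, not part of this commit: coverage.py's Coverage constructor accepts a source argument that restricts measurement to the code under test, so the report excludes the test files themselves and anything else imported during the run. A minimal sketch of tests.py with that option, assuming the package directory is named autogpt:

import unittest

import coverage

if __name__ == "__main__":
    # Measure only the autogpt package; by default coverage.py would
    # also count the test modules executed by the runner.
    cov = coverage.Coverage(source=["autogpt"])
    cov.start()

    # Discover and run the tests exactly as the committed script does
    suite = unittest.defaultTestLoader.discover("./tests")
    unittest.TextTestRunner().run(suite)

    # Persist the data and print a per-file report with uncovered lines
    cov.stop()
    cov.save()
    cov.report(show_missing=True)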
59 changes: 37 additions & 22 deletions tests/promptgenerator_tests.py → tests/test_prompt_generator.py
@@ -1,25 +1,35 @@
 # Import the required libraries for unit testing
-import os
-import sys
 import unittest
+from unittest import TestCase
 
 from autogpt.promptgenerator import PromptGenerator
 
 
-# Create a test class for the PromptGenerator, subclassed from unittest.TestCase
-class promptgenerator_tests(unittest.TestCase):
-    # Set up the initial state for each test method by creating an instance of PromptGenerator
-    def setUp(self):
-        self.generator = PromptGenerator()
+class TestPromptGenerator(TestCase):
+    """
+    Test cases for the PromptGenerator class, which is responsible for generating
+    prompts for the AI with constraints, commands, resources, and performance evaluations.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        """
+        Set up the initial state for each test method by creating an instance of PromptGenerator.
+        """
+        cls.generator = PromptGenerator()
 
-    # Test whether the add_constraint() method adds a constraint to the generator's constraints list
     def test_add_constraint(self):
+        """
+        Test if the add_constraint() method adds a constraint to the generator's constraints list.
+        """
         constraint = "Constraint1"
         self.generator.add_constraint(constraint)
         self.assertIn(constraint, self.generator.constraints)
 
-    # Test whether the add_command() method adds a command to the generator's commands list
     def test_add_command(self):
+        """
+        Test if the add_command() method adds a command to the generator's commands list.
+        """
         command_label = "Command Label"
         command_name = "command_name"
         args = {"arg1": "value1", "arg2": "value2"}
@@ -31,20 +41,29 @@ def test_add_command(self):
         }
         self.assertIn(command, self.generator.commands)
 
-    # Test whether the add_resource() method adds a resource to the generator's resources list
     def test_add_resource(self):
+        """
+        Test if the add_resource() method adds a resource to the generator's resources list.
+        """
         resource = "Resource1"
         self.generator.add_resource(resource)
         self.assertIn(resource, self.generator.resources)
 
-    # Test whether the add_performance_evaluation() method adds an evaluation to the generator's performance_evaluation list
     def test_add_performance_evaluation(self):
+        """
+        Test if the add_performance_evaluation() method adds an evaluation to the generator's
+        performance_evaluation list.
+        """
         evaluation = "Evaluation1"
         self.generator.add_performance_evaluation(evaluation)
         self.assertIn(evaluation, self.generator.performance_evaluation)
 
-    # Test whether the generate_prompt_string() method generates a prompt string with all the added constraints, commands, resources and evaluations
     def test_generate_prompt_string(self):
+        """
+        Test if the generate_prompt_string() method generates a prompt string with all the added
+        constraints, commands, resources, and evaluations.
+        """
+        # Define the test data
         constraints = ["Constraint1", "Constraint2"]
         commands = [
             {
@@ -61,7 +80,7 @@ def test_generate_prompt_string(self):
         resources = ["Resource1", "Resource2"]
         evaluations = ["Evaluation1", "Evaluation2"]
 
-        # Add all the constraints, commands, resources, and evaluations to the generator
+        # Add test data to the generator
         for constraint in constraints:
             self.generator.add_constraint(constraint)
         for command in commands:
@@ -76,24 +95,20 @@ def test_generate_prompt_string(self):
         # Generate the prompt string and verify its correctness
         prompt_string = self.generator.generate_prompt_string()
         self.assertIsNotNone(prompt_string)
 
-        # Check if all constraints, commands, resources, and evaluations are present in the prompt string
         for constraint in constraints:
             self.assertIn(constraint, prompt_string)
         for command in commands:
             self.assertIn(command["name"], prompt_string)
-
-            # Check for each key-value pair in the command args dictionary
             for key, value in command["args"].items():
                 self.assertIn(f'"{key}": "{value}"', prompt_string)
         for resource in resources:
             self.assertIn(resource, prompt_string)
         for evaluation in evaluations:
             self.assertIn(evaluation, prompt_string)
 
+        self.assertIn("constraints", prompt_string.lower())
+        self.assertIn("commands", prompt_string.lower())
+        self.assertIn("resources", prompt_string.lower())
+        self.assertIn("performance evaluation", prompt_string.lower())
 
 
 # Run the tests when this script is executed
 if __name__ == "__main__":
     unittest.main()
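The promptgenerator module itself is outside this diff, so for orientation only, here is a minimal sketch of the interface these tests exercise. It is reconstructed entirely from the assertions above; the add_command signature, the internal dict layout, and the JSON-style args formatting are assumptions, not the real autogpt.promptgenerator implementation:

import json


class PromptGenerator:
    """Illustrative stand-in reconstructed from what the tests assert;
    not the implementation shipped in autogpt.promptgenerator."""

    def __init__(self):
        # The tests read these four attributes directly.
        self.constraints = []
        self.commands = []
        self.resources = []
        self.performance_evaluation = []

    def add_constraint(self, constraint):
        self.constraints.append(constraint)

    def add_command(self, command_label, command_name, args=None):
        # test_add_command expects a dict with "label", "name", and "args".
        self.commands.append(
            {"label": command_label, "name": command_name, "args": args or {}}
        )

    def add_resource(self, resource):
        self.resources.append(resource)

    def add_performance_evaluation(self, evaluation):
        self.performance_evaluation.append(evaluation)

    def generate_prompt_string(self):
        # json.dumps renders args as '"key": "value"' pairs, which is the
        # substring format test_generate_prompt_string checks for.
        command_lines = [
            f'{i}. {c["label"]}: {c["name"]}, args: {json.dumps(c["args"])}'
            for i, c in enumerate(self.commands, 1)
        ]
        sections = [
            ("Constraints", self.constraints),
            ("Commands", command_lines),
            ("Resources", self.resources),
            ("Performance Evaluation", self.performance_evaluation),
        ]
        return "\n\n".join(
            f"{title}:\n" + "\n".join(f"- {item}" for item in items)
            for title, items in sections
        )

One consequence of the switch from setUp to setUpClass shows up against this sketch: all test methods share a single instance, so entries added by one test (for example "Constraint1" in test_add_constraint) are still in the lists when test_generate_prompt_string runs.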
