mirror of
https://github.com/DavidGailleton/42-Piscine_Python.git
synced 2026-03-14 05:06:55 +01:00
Compare commits
2 Commits
05a3ddc8b6
...
c44a72413e
| Author | SHA1 | Date | |
|---|---|---|---|
| c44a72413e | |||
| 161ab4339c |
@@ -1,14 +1,14 @@
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
args = sys.argv
|
try:
|
||||||
if len(args) <= 1:
|
args = sys.argv
|
||||||
print("No scores provided. Usage: python3\
|
if len(args) <= 1:
|
||||||
|
raise Exception("No scores provided. Usage: python3\
|
||||||
ft_score_analytics.py <score1> <score2> ...")
|
ft_score_analytics.py <score1> <score2> ...")
|
||||||
else:
|
else:
|
||||||
scores = [0] * (len(args) - 1)
|
scores = [0] * (len(args) - 1)
|
||||||
i = 0
|
i = 0
|
||||||
try:
|
|
||||||
while i < len(args) - 1:
|
while i < len(args) - 1:
|
||||||
scores[i] = int(args[i + 1])
|
scores[i] = int(args[i + 1])
|
||||||
i = i + 1
|
i = i + 1
|
||||||
@@ -19,5 +19,7 @@ if __name__ == "__main__":
|
|||||||
print("High score:", max(scores))
|
print("High score:", max(scores))
|
||||||
print("Low score:", min(scores))
|
print("Low score:", min(scores))
|
||||||
print("Score range", max(scores) - min(scores))
|
print("Score range", max(scores) - min(scores))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
print("Invalid input, only numbers are accepted")
|
print("Invalid input, only numbers are accepted")
|
||||||
|
except Exception as err:
|
||||||
|
print(err)
|
||||||
|
|||||||
@@ -14,16 +14,17 @@ if __name__ == "__main__":
|
|||||||
try:
|
try:
|
||||||
if len(argv) != 2:
|
if len(argv) != 2:
|
||||||
raise Exception("Invalid number of args")
|
raise Exception("Invalid number of args")
|
||||||
|
print(f'Parsing coordinates: "{argv[1]}"')
|
||||||
args = argv[1].split(",")
|
args = argv[1].split(",")
|
||||||
if len(args) != 3:
|
if len(args) != 3:
|
||||||
raise Exception(
|
raise Exception(
|
||||||
"Invalid argument format." + 'Try like this : "15,64,78"'
|
"Invalid argument format." + 'Try like this : "15,64,78"'
|
||||||
)
|
)
|
||||||
int_args = (int(args[0]), int(args[1]), int(args[2]))
|
int_args = (int(args[0]), int(args[1]), int(args[2]))
|
||||||
print("Parsing coordinates:", args[1])
|
print(f"Parsed position: {int_args}")
|
||||||
print_distance((0, 0, 0), int_args)
|
print_distance((0, 0, 0), int_args)
|
||||||
except ValueError as err:
|
except ValueError as err:
|
||||||
print(f'Parsing invalid coordinates: "{argv[1]}"')
|
print(f'Invalid coordinates: "{argv[1]}"')
|
||||||
print(err)
|
print(err)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
print(err)
|
print(err)
|
||||||
|
|||||||
@@ -1,46 +1,94 @@
|
|||||||
def tracker_system(players: dict[str, list[str]]) -> None:
|
def tracker_system(players: dict[str, list[str]]) -> None:
|
||||||
print("=== Achievement Tracker System ===\n")
|
print("=== Achievement Tracker System ===\n")
|
||||||
for player in players:
|
try:
|
||||||
print(f"Player {player} achievements: {set((players[player]))}")
|
for player in players:
|
||||||
print("\n=== Achievement Analytics ===")
|
print(f"Player {player} achievements: {set((players[player]))}")
|
||||||
unique_achievements: set[str] = set(())
|
print("\n=== Achievement Analytics ===")
|
||||||
for player in players:
|
unique_achievements: set[str] = set(())
|
||||||
unique_achievements = unique_achievements | set((players[player]))
|
for player in players:
|
||||||
print(f"All unique achievements: {unique_achievements}")
|
unique_achievements = unique_achievements | set((players[player]))
|
||||||
print(f"Total unique achievements: {len(unique_achievements)}")
|
print(f"All unique achievements: {unique_achievements}")
|
||||||
common_achievements: set[str] = unique_achievements
|
print(f"Total unique achievements: {len(unique_achievements)}")
|
||||||
for player in players:
|
common_achievements: set[str] = unique_achievements
|
||||||
common_achievements = common_achievements & set((players[player]))
|
for player in players:
|
||||||
print(f"\nCommon to all players: {common_achievements}")
|
common_achievements = common_achievements & set((players[player]))
|
||||||
player_rare: dict[str, set[str]] = {}
|
print(f"\nCommon to all players: {common_achievements}")
|
||||||
for player in players:
|
except Exception as err:
|
||||||
temp = set((players[player]))
|
print(err)
|
||||||
for other in players:
|
try:
|
||||||
if other != player:
|
player_rare: dict[str, set[str]] = {}
|
||||||
temp = temp - set((players[other]))
|
for player in players:
|
||||||
player_rare[player] = temp
|
temp = set((players[player]))
|
||||||
rare_achievements: set[str] = set(())
|
for other in players:
|
||||||
for n in player_rare:
|
if other != player:
|
||||||
rare_achievements = rare_achievements | player_rare[n]
|
temp = temp - set((players[other]))
|
||||||
print(f"Rare achievements (1 player): {rare_achievements}\n")
|
player_rare[player] = temp
|
||||||
a_vs_b_common = set((players["Alice"])) & set((players["Bob"]))
|
rare_achievements: set[str] = set(())
|
||||||
print(f"Alice vs Bob common: {a_vs_b_common}")
|
for n in player_rare:
|
||||||
alice_unique = set((players["Alice"])) - set((players["Bob"]))
|
rare_achievements = rare_achievements | player_rare[n]
|
||||||
print(f"Alice unique: {alice_unique}")
|
print(f"Rare achievements (1 player): {rare_achievements}\n")
|
||||||
bob_unique = set((players["Bob"])) - set((players["Alice"]))
|
a_vs_b_common = set((players["alice"])) & set((players["bob"]))
|
||||||
print(f"Bob unique: {bob_unique}")
|
print(f"Alice vs Bob common: {a_vs_b_common}")
|
||||||
|
alice_unique = set((players["alice"])) - set((players["bob"]))
|
||||||
|
print(f"Alice unique: {alice_unique}")
|
||||||
|
bob_unique = set((players["bob"])) - set((players["alice"]))
|
||||||
|
print(f"Bob unique: {bob_unique}")
|
||||||
|
except Exception as err:
|
||||||
|
print(err)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
players = {
|
data = {
|
||||||
"Alice": ["first_kill", "level_10", "treasure_hunter", "speed_demon"],
|
"alice": [
|
||||||
"Bob": ["first_kill", "level_10", "boss_slayer", "collector"],
|
"first_blood",
|
||||||
"Charlie": [
|
"pixel_perfect",
|
||||||
"level_10",
|
"speed_runner",
|
||||||
"treasure_hunter",
|
"first_blood",
|
||||||
"boss_slayer",
|
"first_blood",
|
||||||
"speed_demon",
|
"boss_hunter",
|
||||||
"perfectionist",
|
],
|
||||||
|
"bob": [
|
||||||
|
"level_master",
|
||||||
|
"boss_hunter",
|
||||||
|
"treasure_seeker",
|
||||||
|
"level_master",
|
||||||
|
"level_master",
|
||||||
|
],
|
||||||
|
"charlie": [
|
||||||
|
"treasure_seeker",
|
||||||
|
"boss_hunter",
|
||||||
|
"combo_king",
|
||||||
|
"first_blood",
|
||||||
|
"boss_hunter",
|
||||||
|
"first_blood",
|
||||||
|
"boss_hunter",
|
||||||
|
"first_blood",
|
||||||
|
],
|
||||||
|
"diana": [
|
||||||
|
"first_blood",
|
||||||
|
"combo_king",
|
||||||
|
"level_master",
|
||||||
|
"treasure_seeker",
|
||||||
|
"speed_runner",
|
||||||
|
"combo_king",
|
||||||
|
"combo_king",
|
||||||
|
"level_master",
|
||||||
|
],
|
||||||
|
"eve": [
|
||||||
|
"level_master",
|
||||||
|
"treasure_seeker",
|
||||||
|
"first_blood",
|
||||||
|
"treasure_seeker",
|
||||||
|
"first_blood",
|
||||||
|
"treasure_seeker",
|
||||||
|
],
|
||||||
|
"frank": [
|
||||||
|
"explorer",
|
||||||
|
"boss_hunter",
|
||||||
|
"first_blood",
|
||||||
|
"explorer",
|
||||||
|
"first_blood",
|
||||||
|
"boss_hunter",
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
tracker_system(players)
|
tracker_system(data)
|
||||||
|
|||||||
@@ -72,19 +72,22 @@ def dictionnary_properties_demo(inventory: dict[str, int]) -> None:
|
|||||||
def inventory_report(inventory: dict[str, int]) -> None:
|
def inventory_report(inventory: dict[str, int]) -> None:
|
||||||
nb_unique_items = len(inventory)
|
nb_unique_items = len(inventory)
|
||||||
nb_item_in_inventory = 0
|
nb_item_in_inventory = 0
|
||||||
for n in inventory:
|
try:
|
||||||
nb_item_in_inventory += inventory[n]
|
for n in inventory:
|
||||||
if nb_item_in_inventory == 0:
|
nb_item_in_inventory += inventory[n]
|
||||||
print("Inventory is empty")
|
if nb_item_in_inventory == 0:
|
||||||
else:
|
print("Inventory is empty")
|
||||||
print("=== Inventory System Analysis ===")
|
else:
|
||||||
print(f"Total items in inventory: {nb_item_in_inventory}")
|
print("=== Inventory System Analysis ===")
|
||||||
print(f"Unique item types: {nb_unique_items}")
|
print(f"Total items in inventory: {nb_item_in_inventory}")
|
||||||
current_inventory(inventory, nb_item_in_inventory)
|
print(f"Unique item types: {nb_unique_items}")
|
||||||
inventory_statistics(inventory)
|
current_inventory(inventory, nb_item_in_inventory)
|
||||||
item_categories(inventory)
|
inventory_statistics(inventory)
|
||||||
management_suggestions(inventory)
|
item_categories(inventory)
|
||||||
dictionnary_properties_demo(inventory)
|
management_suggestions(inventory)
|
||||||
|
dictionnary_properties_demo(inventory)
|
||||||
|
except Exception as err:
|
||||||
|
print(err)
|
||||||
|
|
||||||
|
|
||||||
def main(argv: list[str]) -> None:
|
def main(argv: list[str]) -> None:
|
||||||
|
|||||||
@@ -70,14 +70,14 @@ def game_data_stream_processor(
|
|||||||
|
|
||||||
def generator_demo() -> None:
|
def generator_demo() -> None:
|
||||||
print("=== Generator Demonstration ===")
|
print("=== Generator Demonstration ===")
|
||||||
print("Fibonacci sequence (first 10): ", end="")
|
print("Fibonacci sequence (first 10):", end="")
|
||||||
fib = fibonacci()
|
fib = fibonacci()
|
||||||
for _ in range(10):
|
for _ in range(10):
|
||||||
print(f"{next(fib)} ", end="")
|
print(f" {next(fib)}", end="")
|
||||||
print("\nPrime number (first 5): ", end="")
|
print("\nPrime number (first 5):", end="")
|
||||||
prime = prime_number()
|
prime = prime_number()
|
||||||
for _ in range(5):
|
for _ in range(5):
|
||||||
print(f"{next(prime)} ", end="")
|
print(f" {next(prime)}", end="")
|
||||||
print("")
|
print("")
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -83,10 +83,19 @@ def comprehension_tester(
|
|||||||
try:
|
try:
|
||||||
print("=== game Analytics Dashboard ===\n")
|
print("=== game Analytics Dashboard ===\n")
|
||||||
list_comprehension_example(data)
|
list_comprehension_example(data)
|
||||||
|
except Exception as err:
|
||||||
|
print(err)
|
||||||
|
try:
|
||||||
print()
|
print()
|
||||||
dict_comprehension_example(data)
|
dict_comprehension_example(data)
|
||||||
|
except Exception as err:
|
||||||
|
print(err)
|
||||||
|
try:
|
||||||
print()
|
print()
|
||||||
set_comprehension_example(data)
|
set_comprehension_example(data)
|
||||||
|
except Exception as err:
|
||||||
|
print(err)
|
||||||
|
try:
|
||||||
print()
|
print()
|
||||||
combined_analysis(data)
|
combined_analysis(data)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
|
|||||||
571
05/main.py
Normal file
571
05/main.py
Normal file
@@ -0,0 +1,571 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Code Nexus - Polymorphism Testing Suite with Type Checking
|
||||||
|
|
||||||
|
This testing suite validates your implementation of polymorphic data processing systems.
|
||||||
|
It checks for proper method overriding, inheritance relationships, polymorphic behavior,
|
||||||
|
and type annotations across all three exercises.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python3 main.py [OPTIONS]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
-h, --help Show this help message and exit
|
||||||
|
-v, --verbose Enable verbose output with detailed test information
|
||||||
|
|
||||||
|
Requirements:
|
||||||
|
- Python 3.10 or later
|
||||||
|
- Your exercise files must be in ex0/, ex1/, and ex2/ directories
|
||||||
|
- All code must include proper type annotations
|
||||||
|
|
||||||
|
Expected Directory Structure:
|
||||||
|
your-project/
|
||||||
|
├── ex0/
|
||||||
|
│ └── stream_processor.py
|
||||||
|
├── ex1/
|
||||||
|
│ └── data_stream.py
|
||||||
|
├── ex2/
|
||||||
|
│ └── nexus_pipeline.py
|
||||||
|
└── main.py (this file)
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
python3 main.py # Run all tests
|
||||||
|
python3 main.py --help # Show this help message
|
||||||
|
python3 main.py --verbose # Run with detailed output
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import importlib.util
|
||||||
|
import ast
|
||||||
|
from typing import List, Dict, Any, Optional, Tuple # noqa: F401
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
class TestResult:
    """Encapsulates the outcome of one test suite with detailed feedback."""

    def __init__(self, name: str) -> None:
        # A result starts out failed with no messages; the test suites
        # append errors/warnings and flip `passed` on success.
        self.name: str = name
        self.passed: bool = False
        self.errors: List[str] = []
        self.warnings: List[str] = []

    def add_error(self, error: str) -> None:
        """Record an error message against this result."""
        self.errors.append(error)

    def add_warning(self, warning: str) -> None:
        """Record a warning message against this result."""
        self.warnings.append(warning)

    def mark_passed(self) -> None:
        """Flag this result as passed."""
        self.passed = True
||||||
|
|
||||||
|
|
||||||
|
class TypeChecker:
    """Validates type annotations in Python source files."""

    def __init__(self) -> None:
        # Kept for interface compatibility with existing callers; the
        # check itself detects typing imports via the AST instead of
        # consulting this list.
        self.required_imports = [
            "typing", "Any", "List", "Dict", "Union", "Optional"
        ]

    def check_file_typing(self, file_path: str) -> Tuple[bool, List[str]]:
        """Check if file has proper type annotations.

        Returns a ``(ok, issues)`` pair: ``ok`` is True when no issue was
        found, and ``issues`` is a list of human-readable problems
        (missing typing imports, unannotated parameters, low coverage of
        return-type annotations). Read/parse failures are reported as a
        single issue rather than raised.
        """
        try:
            # Explicit encoding: Python source defaults to UTF-8
            # (PEP 3120); relying on the locale default can fail on
            # some platforms.
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()

            tree = ast.parse(content)
            issues = []
            has_typing_imports = False

            # Detect `import typing` or `from typing import ...`.
            for node in ast.walk(tree):
                if isinstance(node, ast.ImportFrom) and node.module == "typing":
                    has_typing_imports = True
                elif isinstance(node, ast.Import):
                    for alias in node.names:
                        if alias.name == "typing":
                            has_typing_imports = True

            if not has_typing_imports:
                issues.append("Missing typing imports")

            # Inspect every function definition for annotations.
            function_count = 0
            typed_functions = 0

            for node in ast.walk(tree):
                if isinstance(node, ast.FunctionDef):
                    function_count += 1

                    # Count functions carrying a return-type annotation.
                    if node.returns is not None:
                        typed_functions += 1

                    # Parameters must be annotated; "self" and "cls" are
                    # conventionally left unannotated, so exempt both
                    # (previously only "self" was exempt, which falsely
                    # flagged classmethods).
                    for arg in node.args.args:
                        if arg.annotation is None and \
                                arg.arg not in ("self", "cls"):
                            issues.append(
                                f"Function '{node.name}' parameter "
                                f"'{arg.arg}' missing type annotation"
                            )

            if function_count > 0:
                typing_coverage = (typed_functions / function_count) * 100
                if typing_coverage < 80:
                    issues.append(
                        f"Low typing coverage: {typing_coverage:.1f}% of "
                        "functions have return type annotations"
                    )

            return len(issues) == 0, issues

        except Exception as e:
            # Uniform failure shape for callers: never raise, always
            # return (False, [reason]).
            return False, [f"Error checking types: {str(e)}"]
||||||
|
|
||||||
|
|
||||||
|
class PolymorphismTester:
    """Main testing class for polymorphic implementations with type checking.

    The three exercise checks share one parameterized engine
    (`_check_exercise`); the previous version triplicated ~100 lines of
    identical logic per exercise.
    """

    def __init__(self) -> None:
        # One TestResult per exercise is appended here, in run order.
        self.results: List[TestResult] = []
        self.type_checker = TypeChecker()

    def load_module(self, file_path: str, module_name: str) -> Optional[Any]:
        """Dynamically load a Python module from file path.

        Returns the loaded module, or None if the spec could not be
        created or execution of the module raised.
        """
        try:
            spec = importlib.util.spec_from_file_location(module_name, file_path)
            if spec is None or spec.loader is None:
                return None

            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module
        except Exception as e:
            print(f"Error loading module {module_name}: {e}")
            return None

    def run_all_tests(self, verbose: bool = False) -> bool:
        """Execute all test suites and return overall success status."""
        print("CODE NEXUS - POLYMORPHISM TESTING SUITE WITH TYPE CHECKING")
        print("=" * 60)
        print("Testing polymorphic implementations with type annotations.")
        if verbose:
            print("Verbose mode enabled - detailed output will be shown.")
        print()

        # Run individual test suites
        self._test_exercise_0()
        self._test_exercise_1()
        self._test_exercise_2()

        # Display summary
        self._display_summary()

        return all(result.passed for result in self.results)

    def _check_exercise(
        self,
        result: TestResult,
        file_path: str,
        module_name: str,
        required_classes: List[str],
        base_class: str,
        derived_classes: List[str],
        inheritance_ok_msg: str,
        instance_args: List[Tuple[str, Tuple[Any, ...]]],
        required_methods: List[str],
        missing_method_fmt: str,
        behavior_ok_msg: str,
        behavior_err_prefix: str,
    ) -> None:
        """Shared engine for the three exercise tests.

        Runs, in order: type-annotation check on `file_path`, dynamic
        import as `module_name`, presence of `required_classes`,
        inheritance of each of `derived_classes` from `base_class`,
        then instantiation per `instance_args` (class name, ctor args)
        and presence of each callable in `required_methods`.
        Errors are accumulated on `result`; `result` is marked passed
        only when every check and the typing check succeeded. All
        messages are supplied by the caller so each exercise keeps its
        original wording.
        """
        try:
            # Check type annotations first
            has_proper_typing, typing_issues = \
                self.type_checker.check_file_typing(file_path)
            if not has_proper_typing:
                result.add_error("Type annotation issues found:")
                for issue in typing_issues:
                    result.add_error(f"  • {issue}")
            else:
                print("✓ Type annotations properly implemented")

            # Import the implementation under test
            module = self.load_module(file_path, module_name)
            if not module:
                result.add_error(f"Could not load {module_name}.py module")
                return

            # Verify required classes exist
            missing_classes = [
                name for name in required_classes
                if not hasattr(module, name)
            ]
            if missing_classes:
                result.add_error(
                    f"Missing classes: {', '.join(missing_classes)}"
                )
                return
            print("✓ All required classes found")

            # Verify the inheritance hierarchy
            base = getattr(module, base_class)
            derived = [getattr(module, name) for name in derived_classes]
            for cls in derived:
                if not issubclass(cls, base):
                    result.add_error(
                        f"{cls.__name__} must inherit from {base_class}"
                    )

            if not result.errors:
                print(inheritance_ok_msg)

            # Instantiate and verify the polymorphic interface
            try:
                instances = [
                    getattr(module, name)(*args)
                    for name, args in instance_args
                ]
                for instance in instances:
                    for method in required_methods:
                        if not hasattr(instance, method) or \
                                not callable(getattr(instance, method)):
                            result.add_error(
                                f"{instance.__class__.__name__} "
                                + missing_method_fmt.format(method=method)
                            )

                if not result.errors:
                    print(behavior_ok_msg)
                    if has_proper_typing:
                        result.mark_passed()

            except Exception as e:
                result.add_error(f"{behavior_err_prefix}: {str(e)}")

        except Exception as e:
            result.add_error(f"Unexpected error: {str(e)}")

    def _test_exercise_0(self) -> None:
        """Test Exercise 0: Data Processor Foundation with type checking."""
        result = TestResult("Exercise 0: Data Processor Foundation")
        print("=== Testing Exercise 0: Data Processor Foundation ===")
        self._check_exercise(
            result,
            file_path="ex0/stream_processor.py",
            module_name="stream_processor",
            required_classes=[
                'DataProcessor', 'NumericProcessor',
                'TextProcessor', 'LogProcessor'
            ],
            base_class='DataProcessor',
            derived_classes=[
                'NumericProcessor', 'TextProcessor', 'LogProcessor'
            ],
            inheritance_ok_msg="✓ Inheritance relationships verified",
            instance_args=[
                ('NumericProcessor', ()),
                ('TextProcessor', ()),
                ('LogProcessor', ()),
            ],
            required_methods=['process', 'validate', 'format_output'],
            missing_method_fmt="missing method: {method}",
            behavior_ok_msg="✓ Method overriding implemented correctly",
            behavior_err_prefix="Error testing method overriding",
        )
        if not result.passed:
            print("✗ Exercise 0 failed - check implementation and type annotations")
        self.results.append(result)

    def _test_exercise_1(self) -> None:
        """Test Exercise 1: Polymorphic Streams with type checking."""
        result = TestResult("Exercise 1: Polymorphic Streams")
        print("\n=== Testing Exercise 1: Polymorphic Streams ===")
        self._check_exercise(
            result,
            file_path="ex1/data_stream.py",
            module_name="data_stream",
            required_classes=[
                'DataStream', 'SensorStream', 'TransactionStream',
                'EventStream', 'StreamProcessor'
            ],
            base_class='DataStream',
            derived_classes=[
                'SensorStream', 'TransactionStream', 'EventStream'
            ],
            inheritance_ok_msg="✓ Inheritance relationships verified",
            instance_args=[
                ('SensorStream', ("SENSOR_001",)),
                ('TransactionStream', ("TRANS_001",)),
                ('EventStream', ("EVENT_001",)),
            ],
            required_methods=['process_batch'],
            missing_method_fmt="missing {method} method",
            behavior_ok_msg="✓ Polymorphic behavior implemented correctly",
            behavior_err_prefix="Error testing polymorphic behavior",
        )
        if not result.passed:
            print("✗ Exercise 1 failed - check implementation and type annotations")
        self.results.append(result)

    def _test_exercise_2(self) -> None:
        """Test Exercise 2: Nexus Integration with type checking."""
        result = TestResult("Exercise 2: Nexus Integration")
        print("\n=== Testing Exercise 2: Nexus Integration ===")
        self._check_exercise(
            result,
            file_path="ex2/nexus_pipeline.py",
            module_name="nexus_pipeline",
            required_classes=[
                'ProcessingPipeline', 'InputStage', 'TransformStage',
                'OutputStage', 'JSONAdapter', 'CSVAdapter',
                'StreamAdapter', 'NexusManager'
            ],
            base_class='ProcessingPipeline',
            derived_classes=['JSONAdapter', 'CSVAdapter', 'StreamAdapter'],
            inheritance_ok_msg="✓ Complex inheritance hierarchy verified",
            instance_args=[
                ('JSONAdapter', ("JSON_001",)),
                ('CSVAdapter', ("CSV_001",)),
                ('StreamAdapter', ("STREAM_001",)),
            ],
            required_methods=['process'],
            missing_method_fmt="missing {method} method",
            behavior_ok_msg="✓ Enterprise-level polymorphism "
                            "implemented correctly",
            behavior_err_prefix="Error testing enterprise polymorphism",
        )
        if not result.passed:
            print("✗ Exercise 2 failed - check implementation and type annotations")
        self.results.append(result)

    def _display_summary(self) -> None:
        """Display comprehensive test summary."""
        print("\n" + "=" * 60)
        print("TESTING SUMMARY")
        print("=" * 60)

        passed_count = sum(1 for result in self.results if result.passed)
        total_count = len(self.results)

        print(f"Exercises passed: {passed_count}/{total_count}")

        if passed_count == total_count:
            print("🎉 All tests passed! Your polymorphic implementations "
                  "with type annotations are working correctly.")
        else:
            print("⚠️ Some exercises need work. "
                  "Check the error messages above.")
            print("Make sure you've implemented all required classes, "
                  "methods, and type annotations.")

        # Display detailed errors if any
        for result in self.results:
            if result.errors:
                print(f"\n❌ {result.name}:")
                for error in result.errors:
                    print(f"  • {error}")

        print("\nRemember: This tests basic functionality "
              "and type annotations.")
        print("Make sure your code demonstrates proper polymorphic behavior "
              "with complete typing!")
||||||
|
|
||||||
|
|
||||||
|
def print_help() -> None:
    """Display help message."""
    # The usage text is emitted verbatim; keep its wording in sync with
    # the module docstring.
    usage = """
CODE NEXUS - POLYMORPHISM TESTING SUITE

Usage:
    python3 main.py [OPTIONS]

Options:
    -h, --help     Show this help message and exit
    -v, --verbose  Enable verbose output with detailed test information

Description:
    This testing suite validates your polymorphic data processing
    implementations across three exercises. It checks for:
    - Proper method overriding and inheritance relationships
    - Polymorphic behavior using abstract base classes (ABC)
    - Comprehensive type annotations throughout all code
    - Correct implementation of required classes and methods

Requirements:
    - Python 3.10 or later
    - Exercise files in ex0/, ex1/, and ex2/ directories
    - All code must include proper type annotations

Expected Directory Structure:
    your-project/
    ├── ex0/
    │   └── stream_processor.py
    ├── ex1/
    │   └── data_stream.py
    ├── ex2/
    │   └── nexus_pipeline.py
    └── main.py (this file)

Examples:
    python3 main.py              # Run all tests
    python3 main.py --help       # Show this help message
    python3 main.py --verbose    # Run with detailed output

For more information, refer to the project subject.
"""
    print(usage)
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Main entry point for the testing suite."""
    # Parse command line arguments (only the first one is significant).
    verbose = False
    if len(sys.argv) > 1:
        option = sys.argv[1]
        if option in ['-h', '--help']:
            print_help()
            sys.exit(0)
        elif option in ['-v', '--verbose']:
            verbose = True
        else:
            print(f"Unknown option: {option}")
            print("Use --help for usage information")
            sys.exit(1)

    # Verify exercise directories exist before running any test.
    missing_dirs = [
        f"{name}/" for name in ("ex0", "ex1", "ex2")
        if not Path(name).exists()
    ]

    if missing_dirs:
        print("❌ Exercise directories not found!")
        print(f"Missing: {', '.join(missing_dirs)}")
        print("Please ensure your exercise files are in "
              "ex0/, ex1/, and ex2/ directories")
        print("\nUse --help for more information")
        sys.exit(1)

    tester = PolymorphismTester()
    success = tester.run_all_tests(verbose=verbose)

    # Exit with appropriate code
    sys.exit(0 if success else 1)


if __name__ == "__main__":
    main()
||||||
@@ -1,160 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
from typing import Any
|
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
|
|
||||||
class DataProcessor(ABC):
    """Common interface for the Nexus data processors.

    Concrete subclasses must implement process() and validate();
    format_output() supplies a default output envelope.
    """

    @abstractmethod
    def process(self, data: Any) -> str:
        """Turn raw input data into a string result."""
        ...

    @abstractmethod
    def validate(self, data: Any) -> bool:
        """Report whether the given data is acceptable for this processor."""
        ...

    def format_output(self, result: str) -> str:
        """Wrap a processed result in the default output envelope."""
        return "Processed output: {}".format(result)
class NumericProcessor(DataProcessor):
    """Processor for lists of integers."""

    def process(self, data: Any) -> str:
        """Return the string representation of *data* ("error" on failure)."""
        try:
            return f"{data}"
        except Exception as err:
            print(err)
            return "error"

    def validate(self, data: Any) -> bool:
        """Return True when *data* is iterable and every item is exactly int.

        The concrete type is compared (not isinstance), so bool values
        are rejected, matching the original name-based check.
        """
        try:
            for item in data:
                if type(item) is not int:
                    return False
            return True
        except Exception:
            # Non-iterable input counts as invalid rather than raising.
            return False

    @override
    def format_output(self, result: str) -> str:
        """Summarize a stringified int list ("[1, 2, 3]") as count/sum/avg.

        Fixes: int() parses signed tokens such as "-5", which
        str.isnumeric() rejected, and an input with no parsable integers
        no longer triggers ZeroDivisionError on the average.
        """
        try:
            values: list[int] = []
            for token in result.split(", "):
                token = token.replace("[", "").replace("]", "").strip()
                try:
                    # int() accepts signed tokens, unlike str.isnumeric().
                    values.append(int(token))
                except ValueError:
                    continue
            total = sum(values)
            # Guard the empty case instead of dividing by zero.
            avg = total / len(values) if values else 0.0
            return (f"Processed {len(values)} numeric values, "
                    f"sum={total}, avg={avg:.2f}")
        except Exception as err:
            print(f"NumericProcessor / format_output: {err}")
            return "error"
||||||
class TextProcessor(DataProcessor):
    """Processor for plain text strings."""

    def process(self, data: Any) -> str:
        # Plain str() conversion; no error handling required here.
        return str(data)

    def validate(self, data: Any) -> bool:
        """Return True when data is iterable and every element is exactly str.

        Iterating a str yields one-character strings, so any str input
        passes; non-iterable input is reported as invalid, not raised.
        """
        try:
            for char in data:
                # Name-based exact type check: str subclasses are rejected.
                if type(char).__name__ != "str":
                    return False
            return True
        except Exception:
            return False

    @override
    def format_output(self, result: str) -> str:
        """Summarize text as character and word counts.

        NOTE(review): the backslash continuation inside this f-string
        makes the emitted whitespace depend on the source indentation —
        confirm the intended spacing. Also, split(' ') counts empty
        tokens produced by consecutive spaces; presumably str.split()
        was meant — verify against the expected output.
        """
        return f"Processed text: {len(result)}\
 characters, {len(result.split(' '))} words"
||||||
class LogProcessor(DataProcessor):
    """Processor for two-element [level, message] log entries."""

    def process(self, data: Any) -> str:
        """Render a (level, message) pair as 'level: message'."""
        try:
            return "{}: {}".format(data[0], data[1])
        except Exception as err:
            # Too-short or non-indexable input is reported, not raised.
            print(err)
            return "error"

    def validate(self, data: Any) -> bool:
        """Accept only a length-2 collection whose items are exactly str."""
        try:
            if len(data) != 2:
                return False
            return all(type(item) is str for item in data)
        except Exception:
            return False

    @override
    def format_output(self, result: str) -> str:
        """Prefix a 'LEVEL: message' string with a severity header."""
        try:
            level = result.split(":")[0]
            header = {
                "ERROR": "[ALERT]",
                "INFO": "[INFO]",
                "SUCCESS": "[SUCCESS]",
            }.get(level, "[UNKNOWN]")
            parts = result.split(":", 1)
            # NOTE(review): this join drops the colon between the level
            # and the message — confirm that is the intended output.
            return f"{header} {parts[0]}{parts[1]}"
        except Exception as err:
            print(err)
            return "error"
||||||
def class_tester() -> None:
    """Demo driver: run each processor on sample data and print results.

    For every processor it shows the processed value, the validation
    verdict, and the formatted output, then runs a short polymorphic
    demo calling format_output() through each concrete class.
    """
    print("=== CODE NEXUS - DATA PROCESSOR FOUNDATION ===\n")
    # --- Numeric processor on a list of ints ---
    print("Initializing Numeric Processor...")
    data = [1, 2, 3, 4, 5]
    processor = NumericProcessor()
    res = processor.process(data)
    is_valid = processor.validate(data)
    formatted_output = processor.format_output(res)
    print(f"processing data: {res}")
    if is_valid:
        print("Validation: Numeric data verified")
    else:
        print("Validation: Invalid numeric data")
    print(f"Output: {formatted_output}")
    # --- Text processor on a plain string ---
    print("\nInitializing Text Processor...")
    data = "Hello Nexus World"
    processor = TextProcessor()
    res = processor.process(data)
    is_valid = processor.validate(data)
    formatted_output = processor.format_output(res)
    print(f"processing data: {res}")
    if is_valid:
        print("Validation: Text data verified")
    else:
        print("Validation: Invalid text data")
    print(f"Output: {formatted_output}")
    # --- Log processor on a [level, message] pair ---
    print("\nInitializing Log Processor...")
    data = ["ERROR", "Connection timeout"]
    processor = LogProcessor()
    res = processor.process(data)
    is_valid = processor.validate(data)
    formatted_output = processor.format_output(res)
    print(f"processing data: {res}")
    if is_valid:
        print("Validation: Log data verified")
    else:
        print("Validation: Invalid log data")
    print(f"Output: {formatted_output}")
    print("\n=== Polymorphic Processing Demo ===\n")
    # NOTE(review): backslash continuations inside these f-strings make
    # the emitted whitespace depend on source indentation — confirm the
    # intended spacing of "Result N: ...".
    print(f"Result 1:\
 {NumericProcessor().format_output('[1, 2, 3]')}")
    print(f"Result 2:\
 {TextProcessor().format_output('Hello World !')}")
    print(f"Result 3:\
 {LogProcessor().format_output('INFO: level detected: System ready')}")
    print("\nFoundation systems online. Nexus ready for advanced streams.")
||||||
if __name__ == "__main__":
    # Run the demo only when executed directly, not on import.
    class_tester()
|
|
||||||
Reference in New Issue
Block a user